diff --git a/.travis.yml b/.travis.yml
index 5fbb7f6e6b1..d9f846246cb 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -68,7 +68,7 @@ jobs:
     # - stage: testFuncOptional
     #   env: TRAVIS_MODE=funcTests UA=firefox OS="OS X 10.11"
     - stage: testFuncOptional
-      env: TRAVIS_MODE=funcTests UA=safari OS="OS X 10.11" UA_VERSION="9.0"
+      env: TRAVIS_MODE=funcTests UA=safari OS="OS X 10.12" UA_VERSION="10.1"
      addons:
        sauce_connect:
          tunnel_domains: localhost
diff --git a/demo/chart/timeline-chart.ts b/demo/chart/timeline-chart.ts
index 36a60af28d4..0f8dcfdcc02 100644
--- a/demo/chart/timeline-chart.ts
+++ b/demo/chart/timeline-chart.ts
@@ -132,13 +132,26 @@ export class TimelineChart {
     this.hidden = true;
   }
 
-  updateLevels (levels: any[]) {
+  updateLevels (levels: any[], levelSwitched?: number) {
     const { labels, datasets } = this.chart.data;
+    const { loadLevel, nextLoadLevel, nextAutoLevel } = self.hls;
+    const currentLevel = levelSwitched !== undefined ? levelSwitched : self.hls.currentLevel;
     levels.forEach((level, i) => {
       labels.push(getLevelName(level, level.level || level.id || i));
+      let borderColor = null;
+      if (currentLevel === i) {
+        borderColor = 'rgba(32, 32, 240, 1.0)';
+      } else if (loadLevel === i) {
+        borderColor = 'rgba(255, 128, 0, 1.0)';
+      } else if (nextLoadLevel === i) {
+        borderColor = 'rgba(200, 200, 64, 1.0)';
+      } else if (nextAutoLevel === i) {
+        borderColor = 'rgba(160, 0, 160, 1.0)';
+      }
       datasets.push(datasetWithDefaults({
         url: Array.isArray(level.url) ? level.url[0] : level.url,
         trackType: 'level',
+        borderColor,
         level: level.level
       }));
       if (level.details) {
@@ -150,11 +163,13 @@ export class TimelineChart {
 
   updateAudioTracks (audioTracks: any[]) {
     const { labels, datasets } = this.chart.data;
+    const { audioTrack } = self.hls;
     audioTracks.forEach((track, i) => {
       labels.push(getAudioTrackName(track, i));
       datasets.push(datasetWithDefaults({
         url: Array.isArray(track.url) ? track.url[0] : track.url,
         trackType: 'audioTrack',
+        borderColor: audioTrack === i ? 'rgba(32, 32, 240, 1.0)' : null,
         audioTrack: i
       }));
       if (track.details) {
@@ -166,11 +181,13 @@ export class TimelineChart {
 
   updateSubtitleTracks (subtitles: any[]) {
     const { labels, datasets } = this.chart.data;
+    const { subtitleTrack } = self.hls;
     subtitles.forEach((track, i) => {
       labels.push(getSubtitlesName(track, i));
       datasets.push(datasetWithDefaults({
         url: Array.isArray(track.url) ? track.url[0] : track.url,
         trackType: 'subtitleTrack',
+        borderColor: subtitleTrack === i ? 'rgba(32, 32, 240, 1.0)' : null,
         subtitleTrack: i
       }));
       if (track.details) {
@@ -332,6 +349,14 @@ export class TimelineChart {
     this.resize(datasets);
   }
 
+  removeSourceBuffers () {
+    const { labels, datasets } = this.chart.data;
+    while ((labels[0] || '').indexOf('buffer') > -1) {
+      labels.shift();
+      datasets.shift();
+    }
+  }
+
   setTextTracks (textTracks) {
     const { labels, datasets } = this.chart.data;
     this.removeType('textTrack');
@@ -346,6 +371,7 @@ export class TimelineChart {
       categoryPercentage: 0.5,
       url: '',
       trackType: 'textTrack',
+      borderColor: textTrack.mode !== 'hidden' ? 'rgba(32, 32, 240, 1.0)' : null,
       textTrack: i
     }));
     this.cuesChangeHandler = this.cuesChangeHandler || ((e) => this.updateTextTrackCues(e.currentTarget));
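For quick reference, the highlight rule added to `updateLevels` collapses to a single priority lookup: playing beats loading, which beats the next-up levels. A standalone sketch of that rule (the `pickBorderColor` helper and `LevelIndices` type are illustrative, not part of the demo):

```ts
// Illustrative helper mirroring the priority chain in updateLevels().
// Inputs are the hls.js level indices destructured above; -1 means "none".
type LevelIndices = { currentLevel: number, loadLevel: number, nextLoadLevel: number, nextAutoLevel: number };

function pickBorderColor (i: number, { currentLevel, loadLevel, nextLoadLevel, nextAutoLevel }: LevelIndices): string | null {
  if (currentLevel === i) return 'rgba(32, 32, 240, 1.0)';    // playing (blue)
  if (loadLevel === i) return 'rgba(255, 128, 0, 1.0)';       // loading (orange)
  if (nextLoadLevel === i) return 'rgba(200, 200, 64, 1.0)';  // next load (yellow)
  if (nextAutoLevel === i) return 'rgba(160, 0, 160, 1.0)';   // next auto (purple)
  return null;                                                // not highlighted
}
```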
diff --git a/demo/index.html b/demo/index.html
index d9a935eecac..317c9117eb7 100644
--- a/demo/index.html
+++ b/demo/index.html
@@ -121,15 +121,15 @@
[markup lost in extraction: six tab-button lines removed and six added, followed by a one-line change to the "Playback" tab container]
@@ -169,7 +169,7 @@
[markup lost in extraction: one-line change between the "Playback" and "Quality-levels" tab containers]
@@ -209,7 +209,7 @@
[markup lost in extraction: one-line change between the "Quality-levels" and "Audio-tracks" tab containers]
@@ -223,7 +223,7 @@
[markup lost in extraction: one-line change between the "Audio-tracks" and "Real-time metrics" tab containers]
@@ -258,7 +258,7 @@
[markup lost in extraction: one-line change between the "Real-time metrics" and "Buffer & Statistics" tab containers]
@@ -266,7 +266,7 @@
[markup lost in extraction: one-line change following the "Buffer & Statistics" tab container]
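The lost markup pairs with the `demo/main.js` changes below: each tab button presumably carries a `data-tab` attribute naming its pane plus a shared `demo-tab-btn` class, and each pane carries a `demo-tab` class. A framework-free sketch of that assumed contract (`toggleTabSketch` is illustrative; the demo itself uses jQuery):

```ts
// Assumed markup (reconstructed, not verbatim from index.html):
//   <button class="demo-tab-btn" data-tab="timelineTab">Timeline</button>
//   <div id="timelineTab" class="demo-tab">...</div>
// With that contract, toggling reduces to:
function toggleTabSketch (btn: HTMLElement): void {
  document.querySelectorAll<HTMLElement>('.demo-tab').forEach(tab => { tab.style.display = 'none'; });
  document.querySelectorAll<HTMLElement>('.demo-tab-btn').forEach(b => { b.style.backgroundColor = ''; });
  const tabElId = btn.dataset.tab;                        // e.g. 'timelineTab'
  const pane = tabElId ? document.getElementById(tabElId) : null;
  if (pane) pane.style.display = 'block';                 // show the selected pane
  btn.style.backgroundColor = 'orange';                   // highlight the active button
}
```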
diff --git a/demo/main.js b/demo/main.js
index 0573400326b..75c2b2daff9 100644
--- a/demo/main.js
+++ b/demo/main.js
@@ -12,7 +12,7 @@ const STORAGE_KEYS = {
 };
 
 const testStreams = require('../tests/test-streams');
-const defaultTestStreamUrl = testStreams.bbb.url;
+const defaultTestStreamUrl = testStreams[Object.keys(testStreams)[0]].url;
 const sourceURL = decodeURIComponent(getURLParam('src', defaultTestStreamUrl));
 
 let demoConfig = getURLParam('demoConfig', null);
@@ -138,8 +138,7 @@ $(document).ready(function () {
 
   video.volume = 0.05;
 
-  hideAllTabs();
-  // $('#timelineTab').show();
+  toggleTab($('.demo-tab-btn')[0]);
 
   $('#metricsButtonWindow').toggle(self.windowSliding);
   $('#metricsButtonFixed').toggle(!self.windowSliding);
@@ -1305,13 +1304,35 @@ function addChartEventListeners (hls) {
   hls.on(Hls.Events.BUFFER_CREATED, (eventName, { tracks }) => {
     chart.updateSourceBuffers(tracks, hls.media);
   }, chart);
+  hls.on(Hls.Events.BUFFER_RESET, () => {
+    chart.removeSourceBuffers();
+  }, chart);
   hls.on(Hls.Events.LEVELS_UPDATED, (eventName, { levels }) => {
     chart.removeType('level');
     chart.updateLevels(levels);
   });
-  hls.on(Hls.Events.LEVEL_UPDATED, (eventName, { details, level }) => {
+  hls.on(Hls.Events.LEVEL_SWITCHED, (eventName, { level }) => {
+    // TODO: mutate level datasets
+    // Update currentLevel
+    chart.removeType('level');
+    chart.updateLevels(hls.levels, level);
+  }, chart);
+  hls.on(Hls.Events.LEVEL_LOADING, () => {
+    // TODO: mutate level datasets
+    // Update loadLevel
+    chart.removeType('level');
+    chart.updateLevels(hls.levels);
+  }, chart);
+  hls.on(Hls.Events.FRAG_LOADING, () => {
+    // TODO: mutate level datasets
+    // Update loadLevel
+    chart.removeType('level');
+    chart.updateLevels(hls.levels);
+  }, chart);
+  hls.on(Hls.Events.LEVEL_UPDATED, (eventName, { details }) => {
     chart.updateLevelOrTrack(details);
   }, chart);
+
   hls.on(Hls.Events.AUDIO_TRACKS_UPDATED, (eventName, { audioTracks }) => {
     chart.removeType('audioTrack');
     chart.updateAudioTracks(audioTracks);
@@ -1320,6 +1341,17 @@ function addChartEventListeners (hls) {
     chart.removeType('subtitleTrack');
     chart.updateSubtitleTracks(subtitleTracks);
   }, chart);
+
+  hls.on(Hls.Events.AUDIO_TRACK_SWITCHED, (eventName) => {
+    // TODO: mutate level datasets
+    chart.removeType('audioTrack');
+    chart.updateAudioTracks(hls.audioTracks);
+  }, chart);
+  hls.on(Hls.Events.SUBTITLE_TRACK_SWITCH, (eventName) => {
+    // TODO: mutate level datasets
+    chart.removeType('subtitleTrack');
+    chart.updateSubtitleTracks(hls.subtitleTracks);
+  }, chart);
   hls.on(Hls.Events.AUDIO_TRACK_LOADED, updateLevelOrTrack, chart);
   hls.on(Hls.Events.SUBTITLE_TRACK_LOADED, updateLevelOrTrack, chart);
   hls.on(Hls.Events.LEVEL_PTS_UPDATED, updateLevelOrTrack, chart);
@@ -1366,17 +1398,14 @@ function arrayConcat (inputArray) {
 }
 
 function hideAllTabs () {
-  $('#timelineTab').hide();
-  $('#playbackControlTab').hide();
-  $('#qualityLevelControlTab').hide();
-  $('#audioTrackControlTab').hide();
-  $('#metricsDisplayTab').hide();
-  $('#statsDisplayTab').hide();
+  $('.demo-tab-btn').css('background-color', '');
+  $('.demo-tab').hide();
 }
 
-function toggleTab (tabElId) {
+function toggleTab (btn) {
   hideAllTabs();
   self.hideMetrics();
+  const tabElId = $(btn).data('tab');
   $('#' + tabElId).show();
   if (hls) {
     if (tabElId === 'timelineTab') {
@@ -1386,6 +1415,7 @@ function toggleTab (tabElId) {
       chart.hide();
     }
   }
+  $(btn).css('background-color', 'orange');
 }
 
 function appendLog (textElId, message) {
diff --git a/package-lock.json b/package-lock.json
index 9635a90d8e1..87096b38408 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -4775,9 +4775,9 @@
       }
     },
     "@types/mocha": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-7.0.2.tgz",
-      "integrity": "sha512-ZvO2tAcjmMi8V/5Z3JsyofMe3hasRcaw88cto5etSVMwVQfeivGAlEYmaQgceUSVYFofVjT+ioHsATjdWcFt1w==",
+      "version": "8.0.0",
+      "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-8.0.0.tgz",
+      "integrity": "sha512-jWeYcTo3sCH/rMgsdYXDTO85GNRyTCII5dayMIu/ZO4zbEot1E3iNGaOwpLReLUHjeNQFkgeNNVYlY4dX6azQQ==",
       "dev": true
     },
     "@types/node": {
@@ -5186,6 +5186,15 @@
       "integrity": "sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8=",
       "dev": true
     },
+    "agent-base": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.1.tgz",
+      "integrity": "sha512-01q25QQDwLSsyfhrKbn8yuur+JNw0H+0Y4JiGIKd3z9aYk/w/2kxD/Upc+t2ZBBSUNff50VjPsSW2YxM8QYKVg==",
+      "dev": true,
+      "requires": {
+        "debug": "4"
+      }
+    },
     "aggregate-error": {
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.0.1.tgz",
@@ -6763,15 +6772,16 @@
       }
     },
     "chromedriver": {
-      "version": "83.0.0",
-      "resolved": "https://registry.npmjs.org/chromedriver/-/chromedriver-83.0.0.tgz",
-      "integrity": "sha512-AePp9ykma+z4aKPRqlbzvVlc22VsQ6+rgF+0aL3B5onHOncK18dWSkLrSSJMczP/mXILN9ohGsvpuTwoRSj6OQ==",
+      "version": "84.0.1",
+      "resolved": "https://registry.npmjs.org/chromedriver/-/chromedriver-84.0.1.tgz",
+      "integrity": "sha512-iJ6Y680yp58+KlAPS5YgYe3oePVFf8jY5k4YoczhXkT0p/mQZKfGNkGG/Xc0LjGWDQRTgZwXg66hOXoApIQecg==",
       "dev": true,
       "requires": {
         "@testim/chrome-version": "^1.0.7",
         "axios": "^0.19.2",
         "del": "^5.1.0",
-        "extract-zip": "^2.0.0",
+        "extract-zip": "^2.0.1",
+        "https-proxy-agent": "^5.0.0",
         "mkdirp": "^1.0.4",
         "tcp-port-used": "^1.0.1"
       },
@@ -10098,9 +10108,9 @@
       "dev": true
     },
     "extract-zip": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.0.tgz",
-      "integrity": "sha512-i42GQ498yibjdvIhivUsRslx608whtGoFIhF26Z7O4MYncBxp8CwalOs1lnHy21A9sIohWO2+uiE4SRtC9JXDg==",
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-2.0.1.tgz",
+      "integrity": "sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==",
       "dev": true,
       "requires": {
         "@types/yauzl": "^2.9.1",
@@ -11555,6 +11565,16 @@
       "integrity": "sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=",
       "dev": true
     },
+    "https-proxy-agent": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz",
+      "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==",
+      "dev": true,
+      "requires": {
+        "agent-base": "6",
+        "debug": "4"
+      }
+    },
     "human-signals": {
       "version": "1.1.1",
       "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz",
diff --git a/package.json b/package.json
index d16d70072b5..510b64676cf 100644
--- a/package.json
+++ b/package.json
@@ -66,7 +66,7 @@
     "@itsjamie/esdoc-standard-plugin": "^0.3.0",
     "@itsjamie/esdoc-typescript-plugin": "^0.3.0",
     "@types/chai": "^4.2.11",
-    "@types/mocha": "^7.0.2",
+    "@types/mocha": "^8.0.0",
     "@types/sinon-chai": "^3.2.4",
     "@typescript-eslint/eslint-plugin": "^2.34.0",
     "@typescript-eslint/parser": "^2.34.0",
@@ -75,7 +75,7 @@
     "chai": "^4.2.0",
     "chart.js": "^2.9.3",
     "chartjs-plugin-zoom": "^0.7.7",
-    "chromedriver": "^83.0.0",
+    "chromedriver": "^84.0.0",
     "eslint": "^6.8.0",
     "eslint-config-standard": "^14.1.1",
     "eslint-plugin-import": "^2.21.1",
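The repeated `TODO: mutate level datasets` comments above suggest the teardown-and-rebuild on every `LEVEL_SWITCHED`/`LEVEL_LOADING`/`FRAG_LOADING` event is a stopgap. A hedged sketch of the in-place mutation those TODOs point toward (`highlightCurrentLevel` is hypothetical; `chart` is assumed to be the underlying Chart.js instance, whose datasets carry the `trackType` set by `datasetWithDefaults`):

```ts
// Hypothetical in-place variant: recolour the existing level datasets
// instead of removing and re-adding them on every event.
function highlightCurrentLevel (chart: any, currentLevel: number): void {
  chart.data.datasets
    .filter((dataset: any) => dataset.trackType === 'level')
    .forEach((dataset: any, i: number) => {
      dataset.borderColor = i === currentLevel ? 'rgba(32, 32, 240, 1.0)' : null;
    });
  chart.update(); // one redraw instead of a full dataset teardown
}

// Usage, mirroring the LEVEL_SWITCHED handler above:
// hls.on(Hls.Events.LEVEL_SWITCHED, (event, { level }) => highlightCurrentLevel(chart, level));
```
diff --git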
a/src/controller/audio-stream-controller.ts b/src/controller/audio-stream-controller.ts
index 69f634634e0..bd047dcd25e 100644
--- a/src/controller/audio-stream-controller.ts
+++ b/src/controller/audio-stream-controller.ts
@@ -4,6 +4,7 @@ import { Events } from '../events';
 import TimeRanges from '../utils/time-ranges';
 import { ErrorDetails } from '../errors';
 import { logger } from '../utils/logger';
+import { fragmentWithinToleranceTest } from './fragment-finders';
 import { FragmentState, FragmentTracker } from './fragment-tracker';
 import Fragment, { ElementaryStreamTypes } from '../loader/fragment';
 import BaseStreamController, { State } from './base-stream-controller';
@@ -25,7 +26,7 @@ import {
 import { TrackSet } from '../types/track';
 import { Level } from '../types/level';
 import Hls from '../hls';
-import { ComponentAPI } from '../types/component-api';
+import type { ComponentAPI } from '../types/component-api';
 
 const { performance } = self;
 
@@ -39,6 +40,7 @@ class AudioStreamController extends BaseStreamController implements ComponentAPI
   private videoBuffer: any | null = null;
   private initPTS: any = [];
   private videoTrackCC: number = -1;
+  private waitingVideoCC: number = -1;
   private audioSwitch: boolean = false;
   private trackId: number = -1;
   private waitingData: { frag: Fragment, cache: ChunkCache, complete: boolean } | null = null;
@@ -159,28 +161,34 @@ class AudioStreamController extends BaseStreamController implements ComponentAPI
         break;
       }
       case State.WAITING_INIT_PTS: {
-        const videoTrackCC = this.videoTrackCC;
-        if (Number.isFinite(this.initPTS[videoTrackCC])) {
-          // Ensure we don't get stuck in the WAITING_INIT_PTS state if the waiting frag CC doesn't match any initPTS
-          const waitingData = this.waitingData;
-          if (waitingData) {
-            const { frag, cache, complete } = waitingData;
+        // Ensure we don't get stuck in the WAITING_INIT_PTS state if the waiting frag CC doesn't match any initPTS
+        const waitingData = this.waitingData;
+        if (waitingData) {
+          const { frag, cache, complete } = waitingData;
+          if (this.initPTS[frag.cc] !== undefined) {
             this.waitingData = null;
-            if (videoTrackCC === frag.cc) {
-              this.state = State.FRAG_LOADING;
-              const payload = cache.flush();
-              this._handleFragmentLoadProgress(frag, payload);
-              if (complete) {
-                super._handleFragmentLoadComplete(frag, payload);
-              }
-            } else {
-              this.state = State.IDLE;
+            this.state = State.FRAG_LOADING;
+            const payload = cache.flush();
+            this._handleFragmentLoadProgress(frag, payload);
+            if (complete) {
+              super._handleFragmentLoadComplete(frag, payload);
             }
+          } else if (this.videoTrackCC !== this.waitingVideoCC) {
+            // Drop waiting fragment if videoTrackCC has changed since waitingFragment was set and initPTS was not found
+            logger.log(`Waiting fragment cc (${frag.cc}) cancelled because video is at cc ${this.videoTrackCC}`);
+            this.clearWaitingFragment();
           } else {
-            this.state = State.IDLE;
+            // Drop waiting fragment if an earlier fragment is needed
+            const bufferInfo = BufferHelper.bufferInfo(this.mediaBuffer, this.media.currentTime, this.config.maxBufferHole);
+            const waitingFragmentAtPosition = fragmentWithinToleranceTest(bufferInfo.end, this.config.maxFragLookUpTolerance, frag);
+            if (waitingFragmentAtPosition < 0) {
+              logger.log(`Waiting fragment cc (${frag.cc}) @ ${frag.start} cancelled because another fragment at ${bufferInfo.end} is needed`);
+              this.clearWaitingFragment();
+            }
           }
+        } else {
+          this.state = State.IDLE;
         }
-        break;
       }
       default:
         break;
@@ -189,6 +197,16 @@ class AudioStreamController extends BaseStreamController implements ComponentAPI
     this.onTickEnd();
   }
 
+  clearWaitingFragment () {
+    const waitingData = this.waitingData;
+    if (waitingData) {
+      this.fragmentTracker.removeFragment(waitingData.frag);
+      this.waitingData = null;
+      this.waitingVideoCC = -1;
+      this.state = State.IDLE;
+    }
+  }
+
   protected onTickEnd () {
     const { media } = this;
     if (!media || !media.readyState) {
@@ -343,7 +361,7 @@ class AudioStreamController extends BaseStreamController implements ComponentAPI
       fragCurrent.loader.abort();
     }
     this.fragCurrent = null;
-    this.waitingData = null;
+    this.clearWaitingFragment();
     // destroy useless transmuxer when switching audio to main
     if (!altAudio) {
       if (transmuxer) {
@@ -427,15 +445,17 @@ class AudioStreamController extends BaseStreamController implements ComponentAPI
     // If not we need to wait for it
     const initPTS = this.initPTS[frag.cc];
     const initSegmentData = details.initSegment ? details.initSegment.data : [];
-    if (details.initSegment || initPTS !== undefined) {
+    if (initPTS !== undefined) {
       // this.log(`Transmuxing ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
       // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
       const accurateTimeOffset = false; // details.PTSKnown || !details.live;
       const chunkMeta = new ChunkMetadata(frag.level, frag.sn, frag.stats.chunkCount, payload.byteLength);
       transmuxer.push(payload, initSegmentData, audioCodec, '', frag, details.totalduration, accurateTimeOffset, chunkMeta, initPTS);
     } else {
+      logger.log(`Unknown video PTS for cc ${frag.cc}, waiting for video PTS before demuxing audio frag ${frag.sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
       const { cache } = this.waitingData = this.waitingData || { frag, cache: new ChunkCache(), complete: false };
       cache.push(payload);
+      this.waitingVideoCC = this.videoTrackCC;
       this.state = State.WAITING_INIT_PTS;
     }
   }
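While the video `initPTS` for a fragment's discontinuity (`cc`) is unknown, the controller above parks loaded audio bytes in a `ChunkCache` and flushes them through the transmuxer once the offset arrives. A minimal sketch of that park-then-flush pattern (`ChunkCacheLike` is a stand-in, not the hls.js class):

```ts
// Minimal stand-in for hls.js's ChunkCache: accumulate chunks, flush as one buffer.
class ChunkCacheLike {
  private chunks: Uint8Array[] = [];
  push (chunk: Uint8Array): void { this.chunks.push(chunk); }
  flush (): Uint8Array {
    const length = this.chunks.reduce((sum, c) => sum + c.length, 0);
    const out = new Uint8Array(length);
    let offset = 0;
    for (const c of this.chunks) { out.set(c, offset); offset += c.length; }
    this.chunks = [];
    return out;
  }
}

// Caller side: append while waiting, flush once the PTS offset is known.
const cache = new ChunkCacheLike();
cache.push(new Uint8Array([1, 2, 3]));
cache.push(new Uint8Array([4, 5]));
const payload = cache.flush(); // Uint8Array(5), ready for the transmuxer
```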
diff --git a/src/controller/base-stream-controller.ts b/src/controller/base-stream-controller.ts
index 7876118e4fc..8fd7336cf96 100644
--- a/src/controller/base-stream-controller.ts
+++ b/src/controller/base-stream-controller.ts
@@ -112,9 +112,7 @@ export default class BaseStreamController extends TaskLoop {
     const currentTime = media ? media.currentTime : null;
     const bufferInfo = BufferHelper.bufferInfo(mediaBuffer || media, currentTime, this.config.maxBufferHole);
 
-    if (Number.isFinite(currentTime)) {
-      this.log(`media seeking to ${currentTime.toFixed(3)}, state: ${state}`);
-    }
+    this.log(`media seeking to ${Number.isFinite(currentTime) ? currentTime.toFixed(3) : currentTime}, state: ${state}`);
 
     if (state === State.ENDED) {
       // if seeking to unbuffered area, clean up fragPrevious
diff --git a/src/controller/level-helper.ts b/src/controller/level-helper.ts
index a862e35fdd2..5288dfe8d35 100644
--- a/src/controller/level-helper.ts
+++ b/src/controller/level-helper.ts
@@ -52,7 +52,7 @@ export function updatePTS (fragments: Fragment[], fromIdx: number, toIdx: number
   } else {
     // we dont know startPTS[toIdx]
     if (toIdx > fromIdx) {
-      fragTo.start = fragFrom.start + fragFrom.duration;
+      fragTo.start = fragFrom.start + (fragFrom.minEndPTS ? fragFrom.minEndPTS - fragFrom.start : fragFrom.duration);
     } else {
       fragTo.start = Math.max(fragFrom.start - fragTo.duration, 0);
     }
@@ -61,6 +61,7 @@ export function updatePTS (fragments: Fragment[], fromIdx: number, toIdx: number
 
 export function updateFragPTSDTS (details: LevelDetails | undefined, frag: Fragment, startPTS: number, endPTS: number, startDTS: number, endDTS: number): number {
   let maxStartPTS = startPTS;
+  let minEndPTS = endPTS;
   if (Number.isFinite(frag.startPTS)) {
     // delta PTS between audio and video
     const deltaPTS = Math.abs(frag.startPTS - startPTS);
@@ -72,6 +73,7 @@ export function updateFragPTSDTS (details: LevelDetails | undefined, frag: Fragm
 
     maxStartPTS = Math.max(startPTS, frag.startPTS);
     startPTS = Math.min(startPTS, frag.startPTS);
+    minEndPTS = Math.min(endPTS, frag.endPTS);
     endPTS = Math.max(endPTS, frag.endPTS);
     startDTS = Math.min(startDTS, frag.startDTS);
     endDTS = Math.max(endDTS, frag.endDTS);
@@ -81,6 +83,7 @@ export function updateFragPTSDTS (details: LevelDetails | undefined, frag: Fragm
   frag.start = frag.startPTS = startPTS;
   frag.maxStartPTS = maxStartPTS;
   frag.endPTS = frag.appendedPTS = endPTS;
+  frag.minEndPTS = minEndPTS;
   frag.startDTS = startDTS;
   frag.endDTS = endDTS;
   frag.duration = endPTS - startPTS;
diff --git a/src/controller/stream-controller.ts b/src/controller/stream-controller.ts
index b4a78e60185..4ea861c1772 100644
--- a/src/controller/stream-controller.ts
+++ b/src/controller/stream-controller.ts
@@ -341,7 +341,9 @@ export default class StreamController extends BaseStreamController implements Ne
     let previouslyPaused;
     if (media) {
       previouslyPaused = media.paused;
-      media.pause();
+      if (!previouslyPaused) {
+        media.pause();
+      }
     } else {
       // don't restart playback after instant level switch in case media not attached
       previouslyPaused = true;
@@ -367,7 +369,7 @@ export default class StreamController extends BaseStreamController implements Ne
     const media = this.media;
     if (media?.buffered.length) {
       this.immediateSwitch = false;
-      if (BufferHelper.isBuffered(media, media.currentTime)) {
+      if (media.currentTime > 0 && BufferHelper.isBuffered(media, media.currentTime)) {
         // only nudge if currentTime is buffered
         media.currentTime -= 0.0001;
       }
@@ -388,7 +390,6 @@ export default class StreamController extends BaseStreamController implements Ne
     // ensure that media is defined and that metadata are available (to retrieve currentTime)
     if (media?.readyState) {
       let fetchdelay;
-      let nextBufferedFrag;
       const fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
       if (fragPlayingCurrent && fragPlayingCurrent.start > 1) {
         // flush buffer preceding current fragment (flush until current fragment start offset)
@@ -410,10 +411,10 @@ export default class StreamController extends BaseStreamController implements Ne
       }
       // this.log('fetchdelay:'+fetchdelay);
       // find buffer range that will be reached once new fragment will be fetched
-      nextBufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay);
-      if (nextBufferedFrag) {
+      const bufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay);
+      if (bufferedFrag) {
         // we can flush buffer range following this one without stalling playback
-        nextBufferedFrag = this.followingBufferedFrag(nextBufferedFrag);
+        const nextBufferedFrag = this.followingBufferedFrag(bufferedFrag);
         if (nextBufferedFrag) {
           // if we are here, we can also cancel any loading/demuxing in progress, as they are useless
           const fragCurrent = this.fragCurrent;
@@ -425,7 +426,9 @@ export default class StreamController extends BaseStreamController implements Ne
           // start flush position is the start PTS of next buffered frag.
           // we use frag.maxStartPTS which is max(audio startPTS, video startPTS).
           // in case there is a small PTS Delta between audio and video, using maxStartPTS avoids flushing last samples from current fragment
-          this.flushMainBuffer(nextBufferedFrag.maxStartPTS, Number.POSITIVE_INFINITY);
+          const maxStart = nextBufferedFrag.maxStartPTS ? nextBufferedFrag.maxStartPTS : nextBufferedFrag.startPTS;
+          const startPts = Math.max(bufferedFrag.endPTS, maxStart + Math.min(this.config.maxFragLookUpTolerance, nextBufferedFrag.duration));
+          this.flushMainBuffer(startPts, Number.POSITIVE_INFINITY);
         }
       }
     }
@@ -888,13 +891,22 @@ export default class StreamController extends BaseStreamController implements Ne
   _seekToStartPos () {
     const { media } = this;
     const currentTime = media.currentTime;
+    let startPosition = this.startPosition;
     // only adjust currentTime if different from startPosition or if startPosition not buffered
     // at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered
-    const startPosition = media.seeking ? currentTime : this.startPosition;
-    // if currentTime not matching with expected startPosition or startPosition not buffered but close to first buffered
     if (currentTime !== startPosition && startPosition >= 0) {
-      // if startPosition not buffered, let's seek to buffered.start(0)
-      this.log(`Target start position not buffered, seek to buffered.start(0) ${startPosition} from current time ${currentTime} `);
+      if (media.seeking) {
+        logger.log(`could not seek to ${startPosition}, already seeking at ${currentTime}`);
+        return;
+      }
+      const bufferStart = media.buffered.length ? media.buffered.start(0) : 0;
+      const delta = bufferStart - startPosition;
+      if (delta > 0 && delta < this.config.maxBufferHole) {
+        logger.log(`adjusting start position by ${delta} to match buffer start`);
+        startPosition += delta;
+        this.startPosition = startPosition;
+      }
+      this.log(`seek to target start position ${startPosition} from current time ${currentTime}`);
       media.currentTime = startPosition;
     }
   }
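`updatePTS` now derives the next fragment's start from `minEndPTS` (the earlier of the audio and video end times, set by `updateFragPTSDTS`) when transmuxed timing is available, instead of trusting the playlist duration. A worked example with illustrative numbers:

```ts
// Worked example of the updatePTS change (illustrative seconds).
// A parsed fragment starts at t=10; video ends at 16.0 but audio ends at 15.9,
// so minEndPTS = 15.9 while the playlist duration claims 6.
const fragFrom = { start: 10, duration: 6, minEndPTS: 15.9 };
const fragTo = { start: 0 };

// Before: next start = 10 + 6 = 16, trusting the playlist duration.
// After: next start = 10 + (15.9 - 10) = 15.9, trusting parsed timestamps,
// so the next fragment lines up with the track that ends first.
fragTo.start = fragFrom.start + (fragFrom.minEndPTS ? fragFrom.minEndPTS - fragFrom.start : fragFrom.duration);
console.log(fragTo.start); // 15.9
```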
diff --git a/src/demux/transmuxer.ts b/src/demux/transmuxer.ts
index 17533bf8210..b3e59f8184d 100644
--- a/src/demux/transmuxer.ts
+++ b/src/demux/transmuxer.ts
@@ -25,7 +25,13 @@ try {
   now = self.Date.now;
 }
 
-const muxConfig = [
+type MuxConfig =
+  { demux: typeof TSDemuxer, remux: typeof MP4Remuxer } |
+  { demux: typeof MP4Demuxer, remux: typeof PassThroughRemuxer } |
+  { demux: typeof AACDemuxer, remux: typeof MP4Remuxer } |
+  { demux: typeof MP3Demuxer, remux: typeof MP4Remuxer };
+
+const muxConfig: MuxConfig[] = [
   { demux: TSDemuxer, remux: MP4Remuxer },
   { demux: MP4Demuxer, remux: PassThroughRemuxer },
   { demux: AACDemuxer, remux: MP4Remuxer },
@@ -265,35 +271,41 @@ export default class Transmuxer {
     );
   }
 
-  private configureTransmuxer (data: Uint8Array, transmuxConfig: TransmuxConfig) {
+  private configureTransmuxer (data: Uint8Array, transmuxConfig: TransmuxConfig): { remuxer: Remuxer | undefined, demuxer: Demuxer | undefined } {
     const { config, observer, typeSupported, vendor } = this;
     const { audioCodec, defaultInitPts, duration, initSegmentData, videoCodec } = transmuxConfig;
-    let demuxer, remuxer;
     // probe for content type
+    let mux;
     for (let i = 0, len = muxConfig.length; i < len; i++) {
-      const mux = muxConfig[i];
-      const probe = mux.demux.probe;
-      if (probe(data)) {
-        remuxer = this.remuxer = new mux.remux(observer, config, typeSupported, vendor);
-        demuxer = this.demuxer = new mux.demux(observer, config, typeSupported);
-
-        // Ensure that muxers are always initialized with an initSegment
-        this.resetInitSegment(initSegmentData, audioCodec, videoCodec, duration);
-        this.resetInitialTimestamp(defaultInitPts);
-        logger.log(`[transmuxer.ts]: Probe succeeded with a data length of ${data.length}.`);
-        this.probe = probe;
+      mux = muxConfig[i];
+      if (mux.demux.probe(data)) {
         break;
       }
     }
-
+    if (!mux) {
+      return { remuxer: undefined, demuxer: undefined };
+    }
+    // so let's check that current remuxer and demuxer are still valid
+    let demuxer = this.demuxer;
+    let remuxer = this.remuxer;
+    if (!remuxer || !(remuxer instanceof mux.remux)) {
+      remuxer = this.remuxer = new mux.remux(observer, config, typeSupported, vendor);
+    }
+    if (!demuxer || !(demuxer instanceof mux.demux)) {
+      demuxer = this.demuxer = new mux.demux(observer, config, typeSupported);
+      this.probe = mux.demux.probe;
+    }
+    // Ensure that muxers are always initialized with an initSegment
+    this.resetInitSegment(initSegmentData, audioCodec, videoCodec, duration);
+    this.resetInitialTimestamp(defaultInitPts);
+    logger.log(`[transmuxer.ts]: Probe succeeded with a data length of ${data.length}.`);
     return { demuxer, remuxer };
   }
 
   private needsProbing (data: Uint8Array, discontinuity: boolean, trackSwitch: boolean) : boolean {
     // in case of continuity change, or track switch
     // we might switch from content type (AAC container to TS container, or TS to fmp4 for example)
-    // so let's check that current demuxer is still valid
-    return !this.demuxer || ((discontinuity || trackSwitch) && !this.probe(data));
+    return !this.demuxer || (discontinuity || trackSwitch);
   }
 
   private getDecrypter () {
diff --git a/src/loader/fragment.ts b/src/loader/fragment.ts
index 67ec02defe8..8c623427e74 100644
--- a/src/loader/fragment.ts
+++ b/src/loader/fragment.ts
@@ -76,7 +76,10 @@ export default class Fragment {
   public backtracked: boolean = false;
   // Set by `updateFragPTSDTS` in level-helper
   public deltaPTS?: number;
+  // The maximum starting Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
   public maxStartPTS?: number;
+  // The minimum ending Presentation Time Stamp (audio/video PTS) of the fragment. Set after transmux complete.
+  public minEndPTS?: number;
 
   // Load/parse timing information
   public stats: LoadStats = new LoadStats();
   public urlId: number = 0;
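`configureTransmuxer` now probes first and only re-instantiates the demuxer/remuxer when the probed container no longer matches the existing instances, so demuxer state survives discontinuities that keep the same container type. A reduced sketch of that `instanceof`-based reuse (the classes and byte checks are stand-ins, not the real hls.js probes):

```ts
// Stand-in demuxers with static probes (byte checks illustrative only).
class TSDemuxerLike { static probe (data: Uint8Array): boolean { return data[0] === 0x47; } }
class AACDemuxerLike { static probe (data: Uint8Array): boolean { return data[0] === 0xff; } }

const muxConfigLike = [TSDemuxerLike, AACDemuxerLike];
let current: TSDemuxerLike | AACDemuxerLike | undefined;

function configure (data: Uint8Array) {
  const match = muxConfigLike.find(mux => mux.probe(data));
  if (!match) {
    return undefined;
  }
  // Reuse the existing instance when it is already the matching class.
  if (!current || !(current instanceof match)) {
    current = new match();
  }
  return current;
}

configure(new Uint8Array([0x47])); // creates a TSDemuxerLike
configure(new Uint8Array([0x47])); // reuses the same instance
```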
diff --git a/src/remux/mp4-remuxer.ts b/src/remux/mp4-remuxer.ts
index 606514efdbb..b35a248c2dd 100644
--- a/src/remux/mp4-remuxer.ts
+++ b/src/remux/mp4-remuxer.ts
@@ -16,6 +16,7 @@ import { TrackSet } from '../types/track';
 import { SourceBufferName } from '../types/buffer';
 import Fragment from '../loader/fragment';
 import { HlsConfig } from '../config';
+import { toMpegTsClockFromTimescale, toMsFromMpegTsClock } from '../utils/timescale-conversion';
 
 const MAX_SILENT_FRAME_DURATION = 10 * 1000; // 10 seconds
 const AAC_SAMPLES_PER_FRAME = 1024;
@@ -42,7 +43,6 @@ export default class MP4Remuxer implements Remuxer {
     this.ISGenerated = false;
 
     const userAgent = navigator.userAgent || '';
-    this.isSafari = !!(vendor.indexOf('Apple') > -1 && !userAgent.match('CriOS'));
     const chromeIndex = userAgent.indexOf('Chrome');
     if (chromeIndex > -1) {
       // The version string starts immediately after the word "Chrome"
@@ -112,9 +112,8 @@ export default class MP4Remuxer implements Remuxer {
         // if first audio DTS is not aligned with first video DTS then we need to take that into account
         // when providing timeOffset to remuxAudio / remuxVideo. if we don't do that, there might be a permanent / small
         // drift between audio and video streams
-        // Use pts at timeOffset 0 so that VOD streams begin at 0
-        const tsDelta = timeOffset > 0 ? audioTrack.samples[0].dts - videoTrack.samples[0].dts
-          : audioTrack.samples[0].pts - videoTrack.samples[0].pts;
+        const startPTS = videoTrack.samples.reduce((minPTS, sample) => Math.min(minPTS, sample.pts), videoTrack.samples[0].pts);
+        const tsDelta = audioTrack.samples[0].pts - startPTS;
         const audiovideoTimestampDelta = tsDelta / videoTrack.inputTimeScale;
         audioTimeOffset += Math.max(0, audiovideoTimestampDelta);
         videoTimeOffset += Math.max(0, -audiovideoTimestampDelta);
@@ -136,10 +135,10 @@ export default class MP4Remuxer implements Remuxer {
           logger.warn('[mp4-remuxer]: regenerate InitSegment as video detected');
           initSegment = this.generateIS(audioTrack, videoTrack, timeOffset);
         }
-        video = this.remuxVideo(videoTrack, videoTimeOffset, isVideoContiguous, audioTrackLength, accurateTimeOffset);
+        video = this.remuxVideo(videoTrack, videoTimeOffset, isVideoContiguous, audioTrackLength);
       }
     } else if (enoughVideoSamples) {
-      video = this.remuxVideo(videoTrack, videoTimeOffset, isVideoContiguous, 0, accurateTimeOffset);
+      video = this.remuxVideo(videoTrack, videoTimeOffset, isVideoContiguous, 0);
       if (video && audioTrack.codec) {
         this.remuxEmptyAudio(audioTrack, audioTimeOffset, this.isAudioContiguous, video);
       }
@@ -226,9 +225,10 @@ export default class MP4Remuxer implements Remuxer {
       }
     };
     if (computePTSDTS) {
-      const startPTS = Math.round(inputTimeScale * timeOffset);
-      initPTS = Math.min(initPTS, videoSamples[0].pts - startPTS);
-      initDTS = Math.min(initDTS, videoSamples[0].dts - startPTS);
+      const startPTS = videoSamples.reduce((minPTS, sample) => Math.min(minPTS, sample.pts), videoSamples[0].pts);
+      const startOffset = Math.round(inputTimeScale * timeOffset);
+      initDTS = Math.min(initDTS, videoSamples[0].dts - startOffset);
+      initPTS = Math.min(initPTS, startPTS - startOffset);
     }
   }
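The `generateIS` change above anchors `initPTS` to the smallest presentation timestamp in the chunk rather than to `samples[0]`, which presumably matters when B-frame reordering makes the first decoded sample not the earliest presented one. A simplified worked example (illustrative 90 kHz values, single chunk, no prior `initPTS`):

```ts
// Decode order: the I-frame comes first but is not presented first.
const videoSamples = [
  { pts: 6000, dts: 0 },    // I-frame, presented at ~66 ms
  { pts: 3000, dts: 3000 }, // B-frame, presented earliest (~33 ms)
  { pts: 12000, dts: 6000 }
];
const inputTimeScale = 90000;
const timeOffset = 0; // VOD start

const startPTS = videoSamples.reduce((min, s) => Math.min(min, s.pts), videoSamples[0].pts);
const startOffset = Math.round(inputTimeScale * timeOffset);
const initPTS = startPTS - startOffset; // 3000

// Anchoring to 3000 maps the earliest presented frame to t=0; the old code
// anchored to samples[0].pts (6000), pushing the B-frame to a negative timestamp.
console.log(initPTS); // 3000
```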
@@ -246,7 +246,7 @@ export default class MP4Remuxer implements Remuxer {
     }
   }
 
-  remuxVideo (track: DemuxedAvcTrack, timeOffset, contiguous, audioTrackLength, accurateTimeOffset) : RemuxedTrack | undefined {
+  remuxVideo (track: DemuxedAvcTrack, timeOffset, contiguous, audioTrackLength) : RemuxedTrack | undefined {
     const timeScale: number = track.inputTimeScale;
     const inputSamples: Array<AvcSample> = track.samples;
     const outputSamples: Array<Mp4Sample> = [];
@@ -254,82 +254,95 @@ export default class MP4Remuxer implements Remuxer {
     const initPTS: number = this._initPTS;
     let nextAvcDts = this.nextAvcDts;
     let offset = 8;
+    let mp4SampleDuration!: number;
+    let firstDTS;
+    let lastDTS;
     let minPTS: number = Number.POSITIVE_INFINITY;
     let maxPTS: number = Number.NEGATIVE_INFINITY;
-    let mp4SampleDuration!: number;
+    let ptsDtsShift = 0;
+    let sortSamples = false;
 
-    // Safari does not like overlapping DTS on consecutive fragments. let's use nextAvcDts to overcome this if fragments are consecutive
-    const isSafari: boolean = this.isSafari;
-    if (isSafari) {
-      // also consider consecutive fragments as being contiguous (even if a level switch occurs),
-      // for sake of clarity:
-      // consecutive fragments are frags with
-      //  - less than 100ms gaps between new time offset (if accurate) and next expected PTS OR
-      //  - less than 200 ms PTS gaps (timeScale/5)
-      contiguous = contiguous || (inputSamples.length && nextAvcDts &&
-        ((accurateTimeOffset && Math.abs(timeOffset - nextAvcDts / timeScale) < 0.1) ||
-        Math.abs((inputSamples[0].pts - nextAvcDts - initPTS)) < timeScale / 5)
-      );
-    }
 
     // if parsed fragment is contiguous with last one, let's use last DTS value as reference
     if (!contiguous || nextAvcDts === null) {
+      const pts = timeOffset * timeScale;
+      // TODO: Handle case where pts value is wrapped, but dts is not
+      const cts = Math.max(0, inputSamples[0].pts - inputSamples[0].dts);
       // if not contiguous, let's use target timeOffset
-      nextAvcDts = timeOffset * timeScale;
-      logger.log(`[mp4-remuxer]: nextAvcDts generated as ${nextAvcDts}`);
+      nextAvcDts = pts - cts;
     }
 
     // PTS is coded on 33bits, and can loop from -2^32 to 2^32
-    // ptsNormalize will make PTS/DTS value monotonic, we use last known DTS value as reference value
-    inputSamples.forEach(function (sample) {
+    // PTSNormalize will make PTS/DTS value monotonic, we use last known DTS value as reference value
+    for (let i = 0; i < nbSamples; i++) {
+      const sample = inputSamples[i];
+      if (sample.dts > sample.pts) {
+        const PTS_DTS_SHIFT_TOLERANCE_90KHZ = 90000 * 0.2;
+        ptsDtsShift = Math.max(Math.min(ptsDtsShift, sample.pts - sample.dts), -1 * PTS_DTS_SHIFT_TOLERANCE_90KHZ);
+      }
+      if (sample.dts < inputSamples[i > 0 ? i - 1 : i].dts) {
+        sortSamples = true;
+      }
       sample.pts = PTSNormalize(sample.pts - initPTS, nextAvcDts);
       sample.dts = PTSNormalize(sample.dts - initPTS, nextAvcDts);
-      minPTS = Math.min(sample.pts, minPTS);
-      maxPTS = Math.max(sample.pts, maxPTS);
-    });
+    }
+
+    // sort video samples by DTS then PTS then demux id order
+    if (sortSamples) {
+      inputSamples.sort(function (a, b) {
+        const deltadts = a.dts - b.dts;
+        const deltapts = a.pts - b.pts;
+        return deltadts || deltapts;
+      });
+    }
 
     // Get first/last DTS
-    let firstDTS = inputSamples[0].dts;
-    const lastDTS = inputSamples[inputSamples.length - 1].dts;
+    firstDTS = inputSamples[0].dts;
+    lastDTS = inputSamples[inputSamples.length - 1].dts;
 
     // on Safari let's signal the same sample duration for all samples
     // sample duration (as expected by trun MP4 boxes), should be the delta between sample DTS
     // set this constant duration as being the avg delta between consecutive DTS.
     const averageSampleDuration = Math.round((lastDTS - firstDTS) / (nbSamples - 1));
 
+    // handle broken streams with PTS < DTS, tolerance up to 0.2 seconds
+    if (ptsDtsShift < 0) {
+      if (ptsDtsShift < averageSampleDuration * -2) {
+        // Fix for "CNN special report, with CC" in test-streams (including Safari browser)
+        logger.warn(`PTS < DTS detected in video samples, offsetting DTS to PTS ${toMsFromMpegTsClock(-averageSampleDuration, true)} ms`);
+        for (let i = 0; i < nbSamples; i++) {
+          inputSamples[i].dts = inputSamples[i].pts - averageSampleDuration;
+        }
+      } else {
+        // Fix for "Custom IV with bad PTS DTS" in test-streams
+        logger.warn(`PTS < DTS detected in video samples, shifting DTS by ${toMsFromMpegTsClock(ptsDtsShift, true)} ms to overcome this issue`);
+        for (let i = 0; i < nbSamples; i++) {
+          inputSamples[i].dts = inputSamples[i].dts + ptsDtsShift;
+        }
+      }
+      firstDTS = inputSamples[0].dts;
+      lastDTS = inputSamples[nbSamples - 1].dts;
+    }
+
     // if fragments are contiguous, detect hole/overlapping between fragments
     if (contiguous) {
-      // Check timestamp continuity across consecutive fragments, and modify timing in order to remove gaps or overlaps.
+      // check timestamp continuity across consecutive fragments (this is to remove inter-fragment gap/hole)
       const delta = firstDTS - nextAvcDts;
       const foundHole = delta > averageSampleDuration;
       const foundOverlap = delta < -1;
       if (foundHole || foundOverlap) {
-        const millisecondDelta = Math.round(delta / 90);
         if (foundHole) {
-          logger.warn(`AVC: ${millisecondDelta}ms (${delta}dts) hole between fragments detected, filling it`);
+          logger.warn(`AVC: ${toMsFromMpegTsClock(delta, true)} ms (${delta}dts) hole between fragments detected, filling it`);
         } else {
-          logger.warn(`AVC: ${-millisecondDelta}ms (${delta}dts) overlapping between fragments detected`);
+          logger.warn(`AVC: ${toMsFromMpegTsClock(-delta, true)} ms (${delta}dts) overlapping between fragments detected`);
         }
         firstDTS = nextAvcDts;
-        minPTS -= delta;
+        const firstPTS = inputSamples[0].pts - delta;
         inputSamples[0].dts = firstDTS;
-        inputSamples[0].pts = minPTS;
-        logger.log(`Video: First PTS/DTS adjusted: ${Math.round(minPTS / 90)}/${Math.round(firstDTS / 90)}, delta: ${millisecondDelta} ms`);
+        inputSamples[0].pts = firstPTS;
+        logger.log(`Video: First PTS/DTS adjusted: ${toMsFromMpegTsClock(firstPTS, true)}/${toMsFromMpegTsClock(firstDTS, true)}, delta: ${toMsFromMpegTsClock(delta, true)} ms`);
       }
     }
 
-    // handle broken streams with PTS < DTS, tolerance up to 200ms (18000 in 90kHz timescale)
-    const PTSDTSshift = inputSamples.reduce((prev, curr) => Math.max(Math.min(prev, curr.pts - curr.dts), -18000), 0);
-    if (PTSDTSshift < 0) {
-      logger.warn(`[mp4-remuxer]: PTS < DTS detected in video samples, shifting DTS by ${Math.round(PTSDTSshift / 90)} ms to overcome this issue`);
-      for (let i = 0; i < inputSamples.length; i++) {
-        inputSamples[i].dts = Math.max(0, inputSamples[i].dts + PTSDTSshift);
-      }
-    }
-
-    // Clamp first DTS to 0 so that we're still aligning on initPTS,
-    // and not passing negative values to MP4.traf. This will change initial frame compositionTimeOffset!
-    firstDTS = Math.max(inputSamples[0].dts, 0);
-
     let nbNalu = 0;
     let naluLen = 0;
     for (let i = 0; i < nbSamples; i++) {
@@ -347,16 +360,14 @@ export default class MP4Remuxer implements Remuxer {
       sample.length = sampleLen;
 
       // normalize PTS/DTS
-      if (isSafari) {
-        // sample DTS is computed using a constant decoding offset (mp4SampleDuration) between samples
-        sample.dts = firstDTS + i * averageSampleDuration;
-      } else {
-        // ensure sample monotonic DTS
-        sample.dts = Math.max(sample.dts, firstDTS);
-      }
+      // ensure sample monotonic DTS
+      sample.dts = Math.max(sample.dts, firstDTS);
       // ensure that computed value is greater or equal than sample DTS
-      sample.pts = Math.max(sample.pts, sample.dts);
+      sample.pts = Math.max(sample.pts, sample.dts, 0);
+      minPTS = Math.min(sample.pts, minPTS);
+      maxPTS = Math.max(sample.pts, maxPTS);
     }
+    lastDTS = inputSamples[nbSamples - 1].dts;
 
     /* concatenate the video data and construct the mdat in place
       (need 8 more bytes to fill length and mdat type) */
@@ -382,7 +393,6 @@ export default class MP4Remuxer implements Remuxer {
       const avcSample = inputSamples[i];
       const avcSampleUnits = avcSample.units;
       let mp4SampleLength = 0;
-      let compositionTimeOffset;
       // convert NALU bitstream to MP4 format (prepend NALU with size field)
       for (let j = 0, nbUnits = avcSampleUnits.length; j < nbUnits; j++) {
         const unit = avcSampleUnits[j];
@@ -395,40 +405,36 @@ export default class MP4Remuxer implements Remuxer {
         mp4SampleLength += 4 + unitDataLen;
       }
 
-      if (!isSafari) {
-        // expected sample duration is the Decoding Timestamp diff of consecutive samples
-        if (i < nbSamples - 1) {
-          mp4SampleDuration = inputSamples[i + 1].dts - avcSample.dts;
-        } else {
-          const config = this.config;
-          const lastFrameDuration = avcSample.dts - inputSamples[i > 0 ? i - 1 : i].dts;
-          if (config.stretchShortVideoTrack && this.nextAudioPts !== null) {
-            // In some cases, a segment's audio track duration may exceed the video track duration.
-            // Since we've already remuxed audio, and we know how long the audio track is, we look to
-            // see if the delta to the next segment is longer than maxBufferHole.
-            // If so, playback would potentially get stuck, so we artificially inflate
-            // the duration of the last frame to minimize any potential gap between segments.
-            const gapTolerance = Math.floor(config.maxBufferHole * timeScale);
-            const deltaToFrameEnd = (audioTrackLength ? minPTS + audioTrackLength * timeScale : this.nextAudioPts) - avcSample.pts;
-            if (deltaToFrameEnd > gapTolerance) {
-              // We subtract lastFrameDuration from deltaToFrameEnd to try to prevent any video
-              // frame overlap. maxBufferHole should be >> lastFrameDuration anyway.
-              mp4SampleDuration = deltaToFrameEnd - lastFrameDuration;
-              if (mp4SampleDuration < 0) {
-                mp4SampleDuration = lastFrameDuration;
-              }
-              logger.log(`[mp4-remuxer]: It is approximately ${deltaToFrameEnd / 90} ms to the next segment; using duration ${mp4SampleDuration / 90} ms for the last video frame.`);
-            } else {
+      // expected sample duration is the Decoding Timestamp diff of consecutive samples
+      if (i < nbSamples - 1) {
+        mp4SampleDuration = inputSamples[i + 1].dts - avcSample.dts;
+      } else {
+        const config = this.config;
+        const lastFrameDuration = avcSample.dts - inputSamples[i > 0 ? i - 1 : i].dts;
+        if (config.stretchShortVideoTrack && this.nextAudioPts !== null) {
+          // In some cases, a segment's audio track duration may exceed the video track duration.
+          // Since we've already remuxed audio, and we know how long the audio track is, we look to
+          // see if the delta to the next segment is longer than maxBufferHole.
+          // If so, playback would potentially get stuck, so we artificially inflate
+          // the duration of the last frame to minimize any potential gap between segments.
+          const gapTolerance = Math.floor(config.maxBufferHole * timeScale);
+          const deltaToFrameEnd = (audioTrackLength ? minPTS + audioTrackLength * timeScale : this.nextAudioPts) - avcSample.pts;
+          if (deltaToFrameEnd > gapTolerance) {
+            // We subtract lastFrameDuration from deltaToFrameEnd to try to prevent any video
+            // frame overlap. maxBufferHole should be >> lastFrameDuration anyway.
+            mp4SampleDuration = deltaToFrameEnd - lastFrameDuration;
+            if (mp4SampleDuration < 0) {
              mp4SampleDuration = lastFrameDuration;
            }
+            logger.log(`[mp4-remuxer]: It is approximately ${deltaToFrameEnd / 90} ms to the next segment; using duration ${mp4SampleDuration / 90} ms for the last video frame.`);
          } else {
            mp4SampleDuration = lastFrameDuration;
          }
+        } else {
+          mp4SampleDuration = lastFrameDuration;
        }
-        compositionTimeOffset = Math.round(avcSample.pts - avcSample.dts);
-      } else {
-        compositionTimeOffset = Math.max(0, mp4SampleDuration * Math.round((avcSample.pts - avcSample.dts) / mp4SampleDuration));
       }
+      const compositionTimeOffset = Math.round(avcSample.pts - avcSample.dts);
       outputSamples.push(new Mp4Sample(avcSample.key, mp4SampleDuration, mp4SampleLength, compositionTimeOffset));
     }
@@ -455,7 +461,7 @@ export default class MP4Remuxer implements Remuxer {
       startPTS: minPTS / timeScale,
       endPTS: (maxPTS + mp4SampleDuration) / timeScale,
       startDTS: firstDTS / timeScale,
-      endDTS: nextAvcDts / timeScale,
+      endDTS: nextAvcDts as number / timeScale,
       type,
       hasAudio: false,
       hasVideo: true,
@@ -484,7 +490,7 @@ export default class MP4Remuxer implements Remuxer {
     let inputSamples: Array<AudioSample> = track.samples;
     let offset: number = rawMPEG ? 0 : 8;
     let fillFrame: any;
-    let nextAudioPts = this.nextAudioPts;
+    let nextAudioPts: number = this.nextAudioPts || -1;
 
     // window.audioSamples ? window.audioSamples.push(inputSamples.map(s => s.pts)) : (window.audioSamples = [inputSamples.map(s => s.pts)]);
 
@@ -496,7 +502,7 @@ export default class MP4Remuxer implements Remuxer {
     // contiguous fragments are consecutive fragments from same quality level (same level, new SN = old SN + 1)
     // this helps ensuring audio continuity
     // and this also avoids audio glitches/cut when switching quality, or reporting wrong duration on first audio frame
-    this.isAudioContiguous = contiguous = contiguous || (inputSamples.length && nextAudioPts &&
+    this.isAudioContiguous = contiguous = contiguous || (inputSamples.length && nextAudioPts > 0 &&
       ((accurateTimeOffset && Math.abs(timeOffset - nextAudioPts / inputTimeScale) < 0.1) ||
       Math.abs((inputSamples[0].pts - nextAudioPts - initPTS)) < 20 * inputSampleDuration)
     ) as boolean;
@@ -509,18 +515,21 @@ export default class MP4Remuxer implements Remuxer {
     // filter out sample with negative PTS that are not playable anyway
     // if we don't remove these negative samples, they will shift all audio samples forward.
     // leading to audio overlap between current / next fragment
-    inputSamples = inputSamples.filter(function (sample) {
-      return sample.pts >= 0;
-    });
+    inputSamples = inputSamples.filter((sample) => sample.pts >= 0);
 
     // in case all samples have negative PTS, and have been filtered out, return now
     if (!inputSamples.length) {
       return;
     }
 
-    if (!contiguous || nextAudioPts === null) {
-      nextAudioPts = accurateTimeOffset ? timeOffset * inputTimeScale : inputSamples[0].pts as number;
-      this.nextAudioPts = nextAudioPts;
+    if (!contiguous || nextAudioPts < 0) {
+      if (!accurateTimeOffset) {
+        // if frags are not contiguous and we can't trust the time offset, let's use first sample PTS as next audio PTS
+        nextAudioPts = inputSamples[0].pts;
+      } else {
+        // if timeOffset is accurate, let's use it as predicted next audio PTS
+        nextAudioPts = Math.max(0, timeOffset * inputTimeScale);
+      }
     }
 
     // If the audio track is missing samples, the frames seem to get "left-shifted" within the
@@ -536,7 +545,6 @@ export default class MP4Remuxer implements Remuxer {
       const sample = inputSamples[i];
       const pts = sample.pts;
       const delta = pts - nextPts;
-
       const duration = Math.abs(1000 * delta / inputTimeScale);
 
       // If we're overlapping by more than a duration, drop this sample
@@ -562,7 +570,7 @@ export default class MP4Remuxer implements Remuxer {
           const missing = Math.round(delta / inputSampleDuration);
           logger.warn(`[mp4-remuxer]: Injecting ${missing} audio frame @ ${(nextPts / inputTimeScale).toFixed(3)}s due to ${Math.round(1000 * delta / inputTimeScale)} ms gap.`);
           for (let j = 0; j < missing; j++) {
-            const newStamp = Math.max(nextPts, 0);
+            const newStamp = Math.max(nextPts as number, 0);
             fillFrame = AAC.getSilentFrame(track.manifestCodec || track.codec, track.channelCount);
             if (!fillFrame) {
               logger.log('[mp4-remuxer]: Unable to get silent frame for given audio codec; duplicating last frame instead.');
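The reworked `remuxVideo` measures the worst PTS-before-DTS violation up front (clamped at 0.2 s of the 90 kHz clock) and then picks one of two repairs: rebuild DTS from PTS when the violation exceeds two average frame durations, or slide every DTS back by the measured shift otherwise. A standalone sketch of that decision (illustrative numbers, simplified types):

```ts
const PTS_DTS_SHIFT_TOLERANCE_90KHZ = 90000 * 0.2; // 0.2 s tolerance

function repairPtsDts (samples: Array<{ pts: number, dts: number }>, averageSampleDuration: number) {
  let ptsDtsShift = 0;
  for (const s of samples) {
    if (s.dts > s.pts) {
      // most negative (pts - dts) seen so far, clamped to -0.2 s
      ptsDtsShift = Math.max(Math.min(ptsDtsShift, s.pts - s.dts), -PTS_DTS_SHIFT_TOLERANCE_90KHZ);
    }
  }
  if (ptsDtsShift < 0) {
    if (ptsDtsShift < averageSampleDuration * -2) {
      // Large violation: rebuild DTS from PTS, one frame behind
      for (const s of samples) s.dts = s.pts - averageSampleDuration;
    } else {
      // Small violation: slide every DTS back by the worst offset
      for (const s of samples) s.dts = s.dts + ptsDtsShift;
    }
  }
  return samples;
}

// e.g. one frame where dts leads pts by 1500 ticks (~16 ms) with 3000-tick frames:
console.log(repairPtsDts([{ pts: 0, dts: 1500 }, { pts: 3000, dts: 3000 }], 3000));
// -> small-violation branch: dts becomes [0, 1500]; pts >= dts holds again
```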
diff --git a/tests/functional/auto/setup.js b/tests/functional/auto/setup.js
index 3e24361cb3a..a3e8be87607 100644
--- a/tests/functional/auto/setup.js
+++ b/tests/functional/auto/setup.js
@@ -63,6 +63,7 @@ HttpServer.createServer({
 }).listen(8000, hostname);
 
 const wait = ms => new Promise(resolve => self.setTimeout(resolve, ms));
+const stringifyResult = (result) => JSON.stringify(result, Object.keys(result).filter(k => k !== 'logs'), 2);
 
 async function retry (attempt, numAttempts = 5, interval = 2000) {
   try {
     return await attempt();
@@ -77,205 +78,182 @@ async function retry (attempt, numAttempts = 5, interval = 2000) {
   }
 }
 
 async function testLoadedData (url, config) {
-  const result = await browser.executeAsyncScript(
-    (url, config) => {
-      const callback = arguments[arguments.length - 1];
-      self.startStream(url, config, callback);
-      const video = self.video;
-      video.onloadeddata = function () {
-        callback({ code: 'loadeddata', logs: self.logString });
-      };
-    },
-    url,
-    config
-  );
-  expect(result, JSON.stringify(result, null, 2)).to.have.property('code').which.equals('loadeddata');
+  const result = await browser.executeAsyncScript(function (url, config) {
+    const callback = arguments[arguments.length - 1];
+    self.startStream(url, config, callback);
+    const video = self.video;
+    video.onloadeddata = function () {
+      callback({ code: 'loadeddata', logs: self.logString });
+    };
+  }, url, config);
+  expect(result, stringifyResult(result)).to.have.property('code').which.equals('loadeddata');
 }
 
 async function testIdleBufferLength (url, config) {
-  const result = await browser.executeAsyncScript(
-    (url, config) => {
-      const callback = arguments[arguments.length - 1];
-      const autoplay = false;
-      self.startStream(url, config, callback, autoplay);
-      const video = self.video;
-      const maxBufferLength = self.hls.config.maxBufferLength;
-      video.onprogress = function () {
-        const buffered = video.buffered;
-        if (buffered.length) {
-          const bufferEnd = buffered.end(buffered.length - 1);
-          const duration = video.duration;
-          console.log(`[log] > progress: ${bufferEnd.toFixed(2)}/${duration.toFixed(2)} buffered.length: ${buffered.length}`);
-          if (bufferEnd >= maxBufferLength || bufferEnd > duration - 1) {
-            callback({ code: 'loadeddata', logs: self.logString });
-          }
-        }
-      };
-    },
-    url,
-    config
-  );
-  expect(result, JSON.stringify(result, null, 2)).to.have.property('code').which.equals('loadeddata');
+  const result = await browser.executeAsyncScript(function (url, config) {
+    const callback = arguments[arguments.length - 1];
+    const autoplay = false;
+    self.startStream(url, config, callback, autoplay);
+    const video = self.video;
+    const maxBufferLength = self.hls.config.maxBufferLength;
+    video.onprogress = function () {
+      const buffered = video.buffered;
+      if (buffered.length) {
+        const bufferEnd = buffered.end(buffered.length - 1);
+        const duration = video.duration;
+        console.log('[log] > progress: ' + bufferEnd.toFixed(2) + '/' + duration.toFixed(2) +
+          ' buffered.length: ' + buffered.length);
+        if (bufferEnd >= maxBufferLength || bufferEnd > duration - 1) {
+          callback({ code: 'loadeddata', logs: self.logString });
+        }
+      }
+    };
+  }, url, config);
+  expect(result, stringifyResult(result)).to.have.property('code').which.equals('loadeddata');
 }
 
 async function testSmoothSwitch (url, config) {
-  const result = await browser.executeAsyncScript(
-    (url, config) => {
-      const callback = arguments[arguments.length - 1];
-      self.startStream(url, config, callback);
-      const video = self.video;
-      self.hls.once(self.Hls.Events.FRAG_CHANGED, (event, data) => {
-        self.switchToHighestLevel('next');
-      });
-      self.hls.on(self.Hls.Events.LEVEL_SWITCHED, (event, data) => {
-        console.log(`[test] > level switched: ${data.level}`);
-        const currentTime = video.currentTime;
-        if (data.level === self.hls.levels.length - 1) {
-          console.log(`[test] > switched on level: ${data.level}`);
-          self.setTimeout(function () {
-            const newCurrentTime = video.currentTime;
-            console.log(
-              `[test] > currentTime delta : ${newCurrentTime - currentTime}`
-            );
-            callback({
-              code: newCurrentTime > currentTime,
-              logs: self.logString
-            });
-          }, 2000);
-        }
-      });
-    },
-    url,
-    config
-  );
-  expect(result, JSON.stringify(result, null, 2)).to.have.property('code').which.equals(true);
+  const result = await browser.executeAsyncScript(function (url, config) {
+    const callback = arguments[arguments.length - 1];
+    self.startStream(url, config, callback);
+    const video = self.video;
+    self.hls.once(self.Hls.Events.FRAG_CHANGED, (event, data) => {
+      self.switchToHighestLevel('next');
+    });
+    self.hls.on(self.Hls.Events.LEVEL_SWITCHED, (event, data) => {
+      console.log(`[test] > level switched: ${data.level}`);
+      const currentTime = video.currentTime;
+      if (data.level === self.hls.levels.length - 1) {
+        console.log(`[test] > switched on level: ${data.level}`);
+        self.setTimeout(function () {
+          const newCurrentTime = video.currentTime;
+          console.log(
+            `[test] > currentTime delta : ${newCurrentTime - currentTime}`
+          );
+          callback({
+            currentTimeDelta: newCurrentTime - currentTime,
+            logs: self.logString
+          });
+        }, 2000);
+      }
+    });
+  }, url, config);
+  expect(result, stringifyResult(result)).to.have.property('currentTimeDelta').which.is.gt(0);
 }
 
 async function testSeekOnLive (url, config) {
-  const result = await browser.executeAsyncScript(
-    (url, config) => {
-      const callback = arguments[arguments.length - 1];
-      self.startStream(url, config, callback);
-      const video = self.video;
-      video.onloadeddata = function () {
-        self.setTimeout(function () {
-          video.currentTime = video.duration - 5;
-        }, 5000);
-      };
-      video.onseeked = function () {
-        callback({ code: 'seeked', logs: self.logString });
-      };
-    },
-    url,
-    config
-  );
-  expect(result, JSON.stringify(result, null, 2)).to.have.property('code').which.equals('seeked');
+  const result = await browser.executeAsyncScript(function (url, config) {
+    const callback = arguments[arguments.length - 1];
+    self.startStream(url, config, callback);
+    const video = self.video;
+    video.onloadeddata = function () {
+      self.setTimeout(function () {
+        video.currentTime = video.duration - 5;
+      }, 5000);
+    };
+    video.onseeked = function () {
+      callback({ code: 'seeked', logs: self.logString });
+    };
+  }, url, config);
+  expect(result, stringifyResult(result)).to.have.property('code').which.equals('seeked');
 }
 
 async function testSeekOnVOD (url, config) {
-  const result = await browser.executeAsyncScript(
-    (url, config) => {
-      const callback = arguments[arguments.length - 1];
-      self.startStream(url, config, callback);
-      const video = self.video;
-      video.onloadeddata = function () {
-        self.setTimeout(function () {
-          video.currentTime = video.duration - 5;
-        }, 5000);
-      };
-      video.onended = function () {
-        callback({ code: 'ended', logs: self.logString });
-      };
-    },
-    url,
-    config
-  );
-  expect(result, JSON.stringify(result, null, 2)).to.have.property('code').which.equals('ended');
+  const result = await browser.executeAsyncScript(function (url, config) {
+    const callback = arguments[arguments.length - 1];
+    self.startStream(url, config, callback);
+    const video = self.video;
+    video.onloadeddata = function () {
+      self.setTimeout(function () {
+        video.currentTime = video.duration - 5;
+        // Fail test early if more than 2 buffered ranges are found
+        video.onprogress = function () {
+          if (video.buffered.length > 2) {
+            callback({
+              code: 'buffer-gaps',
+              bufferedRanges: video.buffered.length,
+              logs: self.logString
+            });
+          }
+        };
+      }, 5000);
+    };
+    video.onended = function () {
+      callback({ code: 'ended', logs: self.logString });
+    };
+  }, url, config);
+  expect(result, stringifyResult(result)).to.have.property('code').which.equals('ended');
 }
 
 // async function testSeekEndVOD (url, config) {
-//   const result = await browser.executeAsyncScript(
-//     (url, config) => {
-//       const callback = arguments[arguments.length - 1];
-//       self.startStream(url, config, callback);
-//       const video = self.video;
-//       video.onloadeddata = function () {
-//         self.setTimeout(function () {
-//           video.currentTime = video.duration;
-//         }, 5000);
-//       };
-//       video.onended = function () {
-//         callback({ code: 'ended', logs: self.logString });
-//       };
-//     },
-//     url,
-//     config
-//   );
-//   expect(result, JSON.stringify(result, null, 2)).to.have.property('code').which.equals('ended');
+//   const result = await browser.executeAsyncScript(function (url, config) {
+//     const callback = arguments[arguments.length - 1];
+//     self.startStream(url, config, callback);
+//     const video = self.video;
+//     video.onloadeddata = function () {
+//       self.setTimeout(function () {
+//         video.currentTime = video.duration;
+//       }, 5000);
+//     };
+//     video.onended = function () {
+//       callback({ code: 'ended', logs: self.logString });
+//     };
+//   }, url, config);
+//   expect(result, stringifyResult(result)).to.have.property('code').which.equals('ended');
// }
 
 async function testIsPlayingVOD (url, config) {
-  const result = await browser.executeAsyncScript(
-    (url, config) => {
-      const callback = arguments[arguments.length - 1];
-      self.startStream(url, config, callback);
-      const video = self.video;
-      self.hls.once(self.Hls.Events.FRAG_CHANGED, function () {
-        const expectedPlaying = !(
-          video.paused || // not playing when video is paused
-          video.ended || // not playing when video is ended
-          video.buffered.length === 0
-        ); // not playing if nothing buffered
-        const currentTime = video.currentTime;
-        if (expectedPlaying) {
-          self.setTimeout(function () {
-            console.log(
-              `[test] > video expected playing. [last currentTime/new currentTime]=[${currentTime}/${video.currentTime}]`
-            );
-            callback({ playing: currentTime !== video.currentTime });
-          }, 5000);
-        } else {
-          console.log(
-            `[test] > video not playing. [paused/ended/buffered.length]=[${video.paused}/${video.ended}/${video.buffered.length}]`
-          );
-          callback({ playing: false });
-        }
-      });
-    },
-    url,
-    config
-  );
-  expect(result, JSON.stringify(result, null, 2)).to.have.property('playing').which.is.true;
+  const result = await browser.executeAsyncScript(function (url, config) {
+    const callback = arguments[arguments.length - 1];
+    self.startStream(url, config, callback);
+    const video = self.video;
+    self.hls.once(self.Hls.Events.FRAG_CHANGED, function () {
+      const expectedPlaying = !(
+        video.paused || // not playing when video is paused
+        video.ended || // not playing when video is ended
+        video.buffered.length === 0
+      ); // not playing if nothing buffered
+      const currentTime = video.currentTime;
+      if (expectedPlaying) {
+        self.setTimeout(function () {
+          console.log('[test] > video expected playing. last currentTime/new currentTime=' +
+            currentTime + '/' + video.currentTime);
+          callback({ playing: currentTime !== video.currentTime });
+        }, 5000);
+      } else {
+        console.log('[test] > video not playing. paused/ended/buffered.length=' +
+          video.paused + '/' + video.ended + '/' + video.buffered.length);
+        callback({ playing: false });
+      }
+    });
+  }, url, config);
+  expect(result, stringifyResult(result)).to.have.property('playing').which.is.true;
 }
 
 async function testSeekBackToStart (url, config) {
-  const result = await browser.executeAsyncScript(
-    (url, config) => {
-      const callback = arguments[arguments.length - 1];
-      self.startStream(url, config, callback);
-      const video = self.video;
-      video.ontimeupdate = function () {
-        if (video.currentTime > 0 && !video.paused) {
-          self.setTimeout(function () {
-            video.onseeked = function () {
-              delete video.onseeked;
-              video.ontimeupdate = function () {
-                if (video.currentTime > 0 && !video.paused) {
-                  delete video.ontimeupdate;
-                  callback({ playing: true });
-                }
-              };
-            };
-            video.currentTime = 0;
-            delete video.ontime;
-          }, 500);
-        }
-      };
-    },
-    url,
-    config
-  );
-  expect(result, JSON.stringify(result, null, 2)).to.have.property('playing').which.is.true;
+  const result = await browser.executeAsyncScript(function (url, config) {
+    const callback = arguments[arguments.length - 1];
+    self.startStream(url, config, callback);
+    const video = self.video;
+    video.ontimeupdate = function () {
+      if (video.currentTime > 0 && !video.paused) {
+        self.setTimeout(function () {
+          video.onseeked = function () {
+            delete video.onseeked;
+            video.ontimeupdate = function () {
+              if (video.currentTime > 0 && !video.paused) {
+                delete video.ontimeupdate;
+                callback({ playing: true });
+              }
+            };
+          };
+          video.currentTime = 0;
+          delete video.ontime;
+        }, 500);
+      }
+    };
+  }, url, config);
+  expect(result, stringifyResult(result)).to.have.property('playing').which.is.true;
 }
 
 describe(`testing hls.js playback in the browser on "${browserDescription}"`, function () {
@@ -421,7 +399,7 @@ describe(`testing hls.js playback in the browser on "${browserDescription}"`, fu
         testIsPlayingVOD.bind(null, url, config)
       );
       it(
-        `should seek 5s from end and receive video ended event for ${stream.description}`,
+        `should seek 5s from end and receive video ended event for ${stream.description} with 2 or less buffered ranges`,
         testSeekOnVOD.bind(null, url, config)
      );
      // TODO: Seeking to or past VOD duration should result in the video ending
diff --git a/tests/functional/auto/testbench.js b/tests/functional/auto/testbench.js
index d5a8a964355..a2f8b4a73ef 100644
--- a/tests/functional/auto/testbench.js
+++ b/tests/functional/auto/testbench.js
@@ -92,7 +92,7 @@ function startStream (streamUrl, config, callback, autoplay) {
   var playPromise = video.play();
   if (playPromise) {
     playPromise.catch(function (error) {
-      console.log('[test] > video.play() failed with error:', error);
+      console.log('[test] > video.play() failed with error: ' + error.name + ' ' + error.message);
       if (error.name === 'NotAllowedError') {
         console.log('[test] > Attempting to play with video muted');
         video.muted = true;
diff --git a/tests/test-streams.js b/tests/test-streams.js
index 2bec8b11581..8a3de598316 100644
--- a/tests/test-streams.js
+++ b/tests/test-streams.js
@@ -44,6 +44,12 @@ module.exports = {
       nudgeMaxRetry: 5
     }
   ),
+  fdr: {
+    url: 'https://cdn.jwplayer.com/manifests/pZxWPRg4.m3u8',
+    description: 'FDR - CDN packaged, 4s segments, 180p - 1080p',
+    live: false,
+    abr: true
+  },
   bigBuckBunny480p: {
     url: 'https://test-streams.mux.dev/x36xhzz/url_6/193039199_mp4_h264_aac_hq_7.m3u8',
     description: 'Big Buck Bunny - 480p only',
@@ -65,8 +71,8 @@ module.exports = {
     blacklist_ua: ['internet explorer']
   },
   issue666: {
-    url: 'https://test-streams.mux.dev/issue666/playlists/cisq0gim60007xzvi505emlxx.m3u8',
-    description: 'hls.js/issues/666',
+    url: 'https://playertest.longtailvideo.com/adaptive/issue666/playlists/cisq0gim60007xzvi505emlxx.m3u8',
+    description: 'Surveillance footage - https://github.com/video-dev/hls.js/issues/666',
     live: false,
     abr: false,
     blacklist_ua: ['internet explorer']
@@ -83,8 +89,13 @@ module.exports = {
     url: 'https://playertest.longtailvideo.com/adaptive/captions/playlist.m3u8',
     description: 'CNN special report, with CC',
     live: false,
-    abr: false,
-    blacklist_ua: ['safari']
+    abr: false
+  },
+  customIvBadDts: {
+    url: 'https://playertest.longtailvideo.com/adaptive/customIV/prog_index.m3u8',
+    description: 'Custom IV with bad PTS DTS',
+    live: false,
+    abr: false
   },
   oceansAES: {
     url: 'https://playertest.longtailvideo.com/adaptive/oceans_aes/oceans_aes.m3u8',
diff --git a/tests/unit/controller/stream-controller.js b/tests/unit/controller/stream-controller.js
index d35cc7ba189..75aeed2c956 100644
--- a/tests/unit/controller/stream-controller.js
+++ b/tests/unit/controller/stream-controller.js
@@ -182,6 +182,7 @@ describe('StreamController', function () {
 
   describe('checkBuffer', function () {
     const sandbox = sinon.createSandbox();
+    const bufStart = 5;
 
     beforeEach(function () {
       streamController.gapController = {
@@ -189,6 +190,9 @@ describe('StreamController', function () {
       };
       streamController.media = {
         buffered: {
+          start () {
+            return bufStart;
+          },
          length: 1
        },
        readyState: 4
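The `buffered.start()` stub above feeds the new `_seekToStartPos` logic a concrete buffer start. A hypothetical companion assertion in the same mocha/sinon setup (the test name and values are illustrative, and assume the default `maxBufferHole` of 0.5):

```ts
// With buffered.start(0) = 5 and startPosition = 4.9, the delta (0.1) is
// below maxBufferHole, so the controller should snap playback to the buffer start.
it('adjusts startPosition to the buffered range start when the gap is small', function () {
  streamController.startPosition = 4.9;
  streamController.media.currentTime = 0;
  streamController._seekToStartPos();
  expect(streamController.media.currentTime).to.equal(5);
});
```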