diff --git a/src/dash/controllers/RepresentationController.js b/src/dash/controllers/RepresentationController.js
index 581aceb7a4..4b365f1a8a 100644
--- a/src/dash/controllers/RepresentationController.js
+++ b/src/dash/controllers/RepresentationController.js
@@ -239,7 +239,7 @@ function RepresentationController(config) {
         if (r.adaptation.period.mpd.manifest.type === dashConstants.DYNAMIC && !r.adaptation.period.mpd.manifest.ignorePostponeTimePeriod) {
             let segmentAvailabilityTimePeriod = r.segmentAvailabilityRange.end - r.segmentAvailabilityRange.start;
             // We must put things to sleep unless till e.g. the startTime calculation in ScheduleController.onLiveEdgeSearchCompleted fall after the segmentAvailabilityRange.start
-            let liveDelay = playbackController.computeLiveDelay(currentVoRepresentation.segmentDuration, streamInfo.manifestInfo.DVRWindowSize);
+            let liveDelay = playbackController.getLiveDelay();
             postponeTimePeriod = (liveDelay - segmentAvailabilityTimePeriod) * 1000;
         }
diff --git a/src/streaming/StreamProcessor.js b/src/streaming/StreamProcessor.js
index 55492d744e..5a9400d1fd 100644
--- a/src/streaming/StreamProcessor.js
+++ b/src/streaming/StreamProcessor.js
@@ -38,7 +38,7 @@ import ScheduleController from './controllers/ScheduleController';
 import RepresentationController from '../dash/controllers/RepresentationController';
 import LiveEdgeFinder from './utils/LiveEdgeFinder';
 import FactoryMaker from '../core/FactoryMaker';
-import { checkInteger } from './utils/SupervisorTools';
+import {checkInteger} from './utils/SupervisorTools';
 import EventBus from '../core/EventBus';
 import Events from '../core/events/Events';
 import DashHandler from '../dash/DashHandler';
@@ -49,7 +49,7 @@ import RequestModifier from './utils/RequestModifier';
 import URLUtils from '../streaming/utils/URLUtils';
 import BoxParser from './utils/BoxParser';
 import FragmentRequest from './vo/FragmentRequest';
-import { PlayListTrace } from './vo/metrics/PlayList';
+import {PlayListTrace} from './vo/metrics/PlayList';
 
 
 function StreamProcessor(config) {
@@ -512,8 +512,8 @@ function StreamProcessor(config) {
 
             // Use time just whenever is strictly needed
             request = getFragmentRequest(representationInfo, hasSeekTarget || bufferIsDivided ? time : undefined, {
-                    keepIdx: !hasSeekTarget && !bufferIsDivided
-                });
+                keepIdx: !hasSeekTarget && !bufferIsDivided
+            });
 
             // Then, check if this request was downloaded or not
             while (request && request.action !== FragmentRequest.ACTION_COMPLETE && fragmentModel.isFragmentLoaded(request)) {
@@ -559,7 +559,7 @@ function StreamProcessor(config) {
             })[0];
 
             const events = handleInbandEvents(bytes, request, eventStreamMedia, eventStreamTrack);
-            eventBus.trigger(Events.ADD_INBAND_EVENTS_REQUESTED, { sender: instance, events: events });
+            eventBus.trigger(Events.ADD_INBAND_EVENTS_REQUESTED, {sender: instance, events: events});
         }
     }
 
@@ -647,12 +647,7 @@ function StreamProcessor(config) {
 
         const currentRepresentationInfo = getRepresentationInfo();
         const liveEdge = liveEdgeFinder.getLiveEdge(currentRepresentationInfo);
-        const liveDelay = playbackController.computeLiveDelay(currentRepresentationInfo.fragmentDuration, currentRepresentationInfo.mediaInfo.streamInfo.manifestInfo.DVRWindowSize);
-        const startTime = liveEdge - liveDelay;
-        logger.debug('live edge: ' + liveEdge + ', live delay: ' + liveDelay + ', live target: ' + startTime);
-        const request = getFragmentRequest(currentRepresentationInfo, startTime, {
-            ignoreIsFinished: true
-        });
+        const request = findRequestForLiveEdge(liveEdge, currentRepresentationInfo);
 
         if (request) {
             // When low latency mode is selected but browser doesn't support fetch
@@ -684,6 +679,39 @@ function StreamProcessor(config) {
         });
     }
 
+    function findRequestForLiveEdge(liveEdge, currentRepresentationInfo) {
+        try {
+            let request = null;
+            let liveDelay = playbackController.getLiveDelay();
+            const dvrWindowSize = !isNaN(streamInfo.manifestInfo.DVRWindowSize) ? streamInfo.manifestInfo.DVRWindowSize : liveDelay;
+            const dvrWindowSafetyMargin = 0.1 * dvrWindowSize;
+            let startTime;
+
+            // Make sure that we have at least a valid request for the end of the DVR window, otherwise we might try forever
+            if (getFragmentRequest(currentRepresentationInfo, liveEdge - dvrWindowSize + dvrWindowSafetyMargin, {
+                ignoreIsFinished: true
+            })) {
+
+                // Try to find a request as close as possible to the targeted live edge
+                while (!request && liveDelay <= dvrWindowSize) {
+                    startTime = liveEdge - liveDelay;
+                    request = getFragmentRequest(currentRepresentationInfo, startTime, {
+                        ignoreIsFinished: true
+                    });
+                    liveDelay += 1; // Increase by one second for each iteration
+                }
+            }
+
+            if (request) {
+                playbackController.setLiveDelay(liveDelay, true);
+            }
+            logger.debug('live edge: ' + liveEdge + ', live delay: ' + liveDelay + ', live target: ' + startTime);
+            return request;
+        } catch (e) {
+            return null;
+        }
+    }
+
     function onSeekTarget(e) {
         if (e.mediaType !== type || e.streamId !== streamInfo.id) return;
 
@@ -772,5 +800,6 @@ function StreamProcessor(config) {
 
     return instance;
 }
+
 StreamProcessor.__dashjs_factory_name = 'StreamProcessor';
 export default FactoryMaker.getClassFactory(StreamProcessor);
diff --git a/src/streaming/controllers/PlaybackController.js b/src/streaming/controllers/PlaybackController.js
index 0660765f71..dd4510a51c 100644
--- a/src/streaming/controllers/PlaybackController.js
+++ b/src/streaming/controllers/PlaybackController.js
@@ -230,13 +230,14 @@ function PlaybackController() {
      * @returns {number} object
      * @memberof PlaybackController#
      */
-    function computeLiveDelay(fragmentDuration, dvrWindowSize, minBufferTime = NaN) {
+    function computeAndSetLiveDelay(fragmentDuration, dvrWindowSize, minBufferTime) {
         let delay,
             ret,
             startTime;
         const END_OF_PLAYLIST_PADDING = 10;
         const MIN_BUFFER_TIME_FACTOR = 4;
         const FRAGMENT_DURATION_FACTOR = 4;
+        const adjustedFragmentDuration = !isNaN(fragmentDuration) && isFinite(fragmentDuration) ? fragmentDuration : NaN;
 
         let suggestedPresentationDelay = adapter.getSuggestedPresentationDelay();
 
@@ -244,12 +245,12 @@ function PlaybackController() {
             delay = 0;
         } else if (mediaPlayerModel.getLiveDelay()) {
             delay = mediaPlayerModel.getLiveDelay(); // If set by user, this value takes precedence
-        } else if (settings.get().streaming.liveDelayFragmentCount !== null && !isNaN(settings.get().streaming.liveDelayFragmentCount) && !isNaN(fragmentDuration)) {
-            delay = fragmentDuration * settings.get().streaming.liveDelayFragmentCount;
+        } else if (settings.get().streaming.liveDelayFragmentCount !== null && !isNaN(settings.get().streaming.liveDelayFragmentCount) && !isNaN(adjustedFragmentDuration)) {
+            delay = adjustedFragmentDuration * settings.get().streaming.liveDelayFragmentCount;
         } else if (settings.get().streaming.useSuggestedPresentationDelay === true && suggestedPresentationDelay !== null && !isNaN(suggestedPresentationDelay) && suggestedPresentationDelay > 0) {
             delay = suggestedPresentationDelay;
-        } else if (!isNaN(fragmentDuration)) {
-            delay = fragmentDuration * FRAGMENT_DURATION_FACTOR;
+        } else if (!isNaN(adjustedFragmentDuration)) {
+            delay = adjustedFragmentDuration * FRAGMENT_DURATION_FACTOR;
         } else {
             delay = !isNaN(minBufferTime) ? minBufferTime * MIN_BUFFER_TIME_FACTOR : streamInfo.manifestInfo.minBufferTime * MIN_BUFFER_TIME_FACTOR;
         }
@@ -277,6 +278,14 @@ function PlaybackController() {
         return liveDelay;
     }
 
+    function setLiveDelay(value, useMaxValue = false) {
+        if (useMaxValue && value < liveDelay) {
+            return;
+        }
+
+        liveDelay = value;
+    }
+
     function getCurrentLiveLatency() {
         if (!isDynamic || isNaN(availabilityStartTime)) {
             return NaN;
         }
@@ -876,8 +885,9 @@ function PlaybackController() {
         getStreamController: getStreamController,
         setLiveStartTime: setLiveStartTime,
         getLiveStartTime: getLiveStartTime,
-        computeLiveDelay: computeLiveDelay,
+        computeAndSetLiveDelay: computeAndSetLiveDelay,
         getLiveDelay: getLiveDelay,
+        setLiveDelay: setLiveDelay,
         getCurrentLiveLatency: getCurrentLiveLatency,
         play: play,
         isPaused: isPaused,
diff --git a/src/streaming/controllers/StreamController.js b/src/streaming/controllers/StreamController.js
index 842c2fc489..c09dabbdaa 100644
--- a/src/streaming/controllers/StreamController.js
+++ b/src/streaming/controllers/StreamController.js
@@ -660,11 +660,7 @@ function StreamController() {
         // For multiperiod streams we should avoid a switch of streams after the seek to the live edge. So we do a calculation of the expected seek time to find the right stream object.
         if (!initialStream && adapter.getIsDynamic() && streams.length) {
             logger.debug('Dynamic multi-period stream: Trying to find the correct starting period');
-            const manifestInfo = adapter.getStreamsInfo(undefined, 1)[0].manifestInfo;
-            const liveEdge = timelineConverter.calcPresentationTimeFromWallTime(new Date(), adapter.getRegularPeriods()[0]);
-            const targetDelay = playbackController.computeLiveDelay(NaN, manifestInfo.DVRWindowSize, manifestInfo.minBufferTime);
-            const targetTime = liveEdge - targetDelay;
-            initialStream = getStreamForTime(targetTime);
+            initialStream = getInitialStream();
         }
         switchStream(null, initialStream !== null ? initialStream : streams[0], NaN);
     }
@@ -678,6 +674,78 @@ function StreamController() {
         }
     }
 
+    function getInitialStream() {
+        try {
+            const streamInfos = adapter.getStreamsInfo(undefined);
+            const manifestInfo = streamInfos[0].manifestInfo;
+            const liveEdge = timelineConverter.calcPresentationTimeFromWallTime(new Date(), adapter.getRegularPeriods()[0]);
+            const fragmentDuration = getFragmentDurationForLiveDelayCalculation(streamInfos, manifestInfo);
+            const targetDelay = playbackController.computeAndSetLiveDelay(fragmentDuration, manifestInfo.DVRWindowSize, manifestInfo.minBufferTime);
+            const targetTime = liveEdge - targetDelay;
+
+            return getStreamForTime(targetTime);
+        } catch (e) {
+            return null;
+        }
+    }
+
+    function getFragmentDurationForLiveDelayCalculation(streamInfos, manifestInfo) {
+        try {
+            let fragmentDuration = NaN;
+
+            // For multiperiod manifests we use the maxFragmentDuration attribute as we do not know the correct starting period
+            if (streamInfos && streamInfos.length > 1) {
+                fragmentDuration = manifestInfo && !isNaN(manifestInfo.maxFragmentDuration) ? manifestInfo.maxFragmentDuration : NaN;
+            }
+
+            // For single period manifests we iterate over all AS and use the maximum segment length
+            else if (streamInfos && streamInfos.length === 1) {
+                const streamInfo = streamInfos[0];
+                const mediaTypes = [Constants.VIDEO, Constants.AUDIO, Constants.FRAGMENTED_TEXT];
+
+
+                const fragmentDurations = mediaTypes
+                    .reduce((acc, mediaType) => {
+                        const mediaInfo = adapter.getMediaInfoForType(streamInfo, mediaType);
+
+                        if (mediaInfo) {
+                            acc.push(mediaInfo);
+                        }
+
+                        return acc;
+                    }, [])
+                    .reduce((acc, mediaInfo) => {
+                        const voRepresentations = adapter.getVoRepresentations(mediaInfo);
+
+                        if (voRepresentations && voRepresentations.length > 0) {
+                            voRepresentations.forEach((voRepresentation) => {
+                                if (voRepresentation) {
+                                    acc.push(voRepresentation);
+                                }
+                            });
+                        }
+
+                        return acc;
+                    }, [])
+                    .reduce((acc, voRepresentation) => {
+                        const representation = adapter.convertDataToRepresentationInfo(voRepresentation);
+
+                        if (representation && representation.fragmentDuration && !isNaN(representation.fragmentDuration)) {
+                            acc.push(representation.fragmentDuration);
+                        }
+
+                        return acc;
+                    }, []);
+
+                fragmentDuration = Math.max(...fragmentDurations);
+            }
+
+            return isFinite(fragmentDuration) ? fragmentDuration : NaN;
+        } catch (e) {
+            return NaN;
+        }
+    }
+
     function onTimeSyncCompleted( /*e*/) {
         const manifest = manifestModel.getValue();
         //TODO check if we can move this to initialize??
diff --git a/test/unit/streaming.controllers.PlaybackControllers.js b/test/unit/streaming.controllers.PlaybackControllers.js
index c74ed0cdbf..0048f3769a 100644
--- a/test/unit/streaming.controllers.PlaybackControllers.js
+++ b/test/unit/streaming.controllers.PlaybackControllers.js
@@ -95,7 +95,7 @@ describe('PlaybackController', function () {
         });
 
         it('should return NaN when getLiveDelay is called after a call to computeLiveDelay with no parameter', function () {
-            expect(playbackController.computeLiveDelay.bind(playbackController)).not.to.throw();
+            expect(playbackController.computeAndSetLiveDelay.bind(playbackController)).not.to.throw();
             expect(playbackController.getLiveDelay()).to.be.NaN; // jshint ignore:line
         });
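
Reviewer note (not part of the diff): the heart of this change is the fallback search in `findRequestForLiveEdge`. If no segment exists at the configured live delay, the delay is widened one second at a time until a request inside the DVR window succeeds, and the delay that worked is then pinned via `playbackController.setLiveDelay(liveDelay, true)` so it can only grow, never shrink. The snippet below is a minimal standalone sketch of that search, assuming a hypothetical `hasSegmentAt(time)` lookup in place of `getFragmentRequest()`; the function name and numbers are made up for illustration.

```js
// Standalone sketch of the delay-widening search introduced above.
// `hasSegmentAt` is a hypothetical stand-in for getFragmentRequest(), not dash.js API.
function findPlayableLiveDelay(liveEdge, initialDelay, dvrWindowSize, hasSegmentAt) {
    const safetyMargin = 0.1 * dvrWindowSize;

    // Give up early if even the oldest usable position has no segment,
    // otherwise the loop below could never succeed.
    if (!hasSegmentAt(liveEdge - dvrWindowSize + safetyMargin)) {
        return NaN;
    }

    let delay = initialDelay;
    while (delay <= dvrWindowSize) {
        if (hasSegmentAt(liveEdge - delay)) {
            return delay; // the caller would keep this via setLiveDelay(delay, true)
        }
        delay += 1; // widen the delay by one second per iteration
    }
    return NaN;
}

// Example: segments only exist from 40 s behind the live edge onwards.
console.log(findPlayableLiveDelay(1000, 8, 60, (t) => t <= 960)); // 40
```

Because `setLiveDelay` is called with `useMaxValue = true`, a delay that had to be widened by this search is never reduced again by later calls with smaller values.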