Skip to content

Commit

Permalink
Fix live delay computation (#3285)
Browse files Browse the repository at this point in the history
* Fix a bug with the live delay calculation for dynamic SegmentTimeline manifests which causes invalid requests.

* Calculate fragment duration for live delay
  • Loading branch information
dsilhavy committed Jun 12, 2020
1 parent 6f5b295 commit df3e5fb
Show file tree
Hide file tree
Showing 5 changed files with 131 additions and 24 deletions.
2 changes: 1 addition & 1 deletion src/dash/controllers/RepresentationController.js
Expand Up @@ -239,7 +239,7 @@ function RepresentationController(config) {
if (r.adaptation.period.mpd.manifest.type === dashConstants.DYNAMIC && !r.adaptation.period.mpd.manifest.ignorePostponeTimePeriod) {
let segmentAvailabilityTimePeriod = r.segmentAvailabilityRange.end - r.segmentAvailabilityRange.start;
// We must postpone operations until e.g. the startTime calculation in ScheduleController.onLiveEdgeSearchCompleted falls after segmentAvailabilityRange.start
let liveDelay = playbackController.computeLiveDelay(currentVoRepresentation.segmentDuration, streamInfo.manifestInfo.DVRWindowSize);
let liveDelay = playbackController.getLiveDelay();
postponeTimePeriod = (liveDelay - segmentAvailabilityTimePeriod) * 1000;
}

Expand Down
51 changes: 40 additions & 11 deletions src/streaming/StreamProcessor.js
Expand Up @@ -38,7 +38,7 @@ import ScheduleController from './controllers/ScheduleController';
import RepresentationController from '../dash/controllers/RepresentationController';
import LiveEdgeFinder from './utils/LiveEdgeFinder';
import FactoryMaker from '../core/FactoryMaker';
import { checkInteger } from './utils/SupervisorTools';
import {checkInteger} from './utils/SupervisorTools';
import EventBus from '../core/EventBus';
import Events from '../core/events/Events';
import DashHandler from '../dash/DashHandler';
Expand All @@ -49,7 +49,7 @@ import RequestModifier from './utils/RequestModifier';
import URLUtils from '../streaming/utils/URLUtils';
import BoxParser from './utils/BoxParser';
import FragmentRequest from './vo/FragmentRequest';
import { PlayListTrace } from './vo/metrics/PlayList';
import {PlayListTrace} from './vo/metrics/PlayList';

function StreamProcessor(config) {

Expand Down Expand Up @@ -512,8 +512,8 @@ function StreamProcessor(config) {
// Use time just whenever is strictly needed
request = getFragmentRequest(representationInfo,
hasSeekTarget || bufferIsDivided ? time : undefined, {
keepIdx: !hasSeekTarget && !bufferIsDivided
});
keepIdx: !hasSeekTarget && !bufferIsDivided
});

// Then, check if this request was downloaded or not
while (request && request.action !== FragmentRequest.ACTION_COMPLETE && fragmentModel.isFragmentLoaded(request)) {
Expand Down Expand Up @@ -556,7 +556,7 @@ function StreamProcessor(config) {
})[0];

const events = handleInbandEvents(bytes, request, eventStreamMedia, eventStreamTrack);
eventBus.trigger(Events.ADD_INBAND_EVENTS_REQUESTED, { sender: instance, events: events });
eventBus.trigger(Events.ADD_INBAND_EVENTS_REQUESTED, {sender: instance, events: events});
}
}

Expand Down Expand Up @@ -644,12 +644,7 @@ function StreamProcessor(config) {

const currentRepresentationInfo = getRepresentationInfo();
const liveEdge = liveEdgeFinder.getLiveEdge(currentRepresentationInfo);
const liveDelay = playbackController.computeLiveDelay(currentRepresentationInfo.fragmentDuration, currentRepresentationInfo.mediaInfo.streamInfo.manifestInfo.DVRWindowSize);
const startTime = liveEdge - liveDelay;
logger.debug('live edge: ' + liveEdge + ', live delay: ' + liveDelay + ', live target: ' + startTime);
const request = getFragmentRequest(currentRepresentationInfo, startTime, {
ignoreIsFinished: true
});
const request = findRequestForLiveEdge(liveEdge, currentRepresentationInfo);

if (request) {
// When low latency mode is selected but browser doesn't support fetch
Expand Down Expand Up @@ -681,6 +676,39 @@ function StreamProcessor(config) {
});
}

/**
 * Searches for a valid fragment request as close as possible to the targeted live edge.
 * Starting from the configured live delay, the delay is increased in one-second steps
 * (up to the DVR window size) until a fragment request can be resolved.
 * @param {number} liveEdge - live edge in presentation time (seconds)
 * @param {object} currentRepresentationInfo - representation info used to build the fragment request
 * @returns {FragmentRequest|null} the request closest to the live edge, or null if none was found or an error occurred
 */
function findRequestForLiveEdge(liveEdge, currentRepresentationInfo) {
    try {
        let request = null;
        let liveDelay = playbackController.getLiveDelay();
        // Fall back to the live delay when the manifest does not define a DVR window
        const dvrWindowSize = !isNaN(streamInfo.manifestInfo.DVRWindowSize) ? streamInfo.manifestInfo.DVRWindowSize : liveDelay;
        const dvrWindowSafetyMargin = 0.1 * dvrWindowSize;
        let startTime;

        // Make sure that we have at least a valid request for the end of the DVR window, otherwise we might try forever
        if (getFragmentRequest(currentRepresentationInfo, liveEdge - dvrWindowSize + dvrWindowSafetyMargin, {
            ignoreIsFinished: true
        })) {

            // Try to find a request as close as possible to the targeted live edge
            while (!request && liveDelay <= dvrWindowSize) {
                startTime = liveEdge - liveDelay;
                request = getFragmentRequest(currentRepresentationInfo, startTime, {
                    ignoreIsFinished: true
                });
                if (!request) {
                    // Only grow the delay when this attempt failed, so the value we
                    // persist below matches the delay that produced the found request
                    liveDelay += 1; // Increase by one second for each iteration
                }
            }
        }

        if (request) {
            // Persist the effective delay; useMaxValue=true so we never shrink an existing, larger delay
            playbackController.setLiveDelay(liveDelay, true);
        }
        logger.debug('live edge: ' + liveEdge + ', live delay: ' + liveDelay + ', live target: ' + startTime);
        return request;
    } catch (e) {
        return null;
    }
}

function onSeekTarget(e) {
if (e.mediaType !== type || e.streamId !== streamInfo.id) return;

Expand Down Expand Up @@ -769,5 +797,6 @@ function StreamProcessor(config) {

return instance;
}

StreamProcessor.__dashjs_factory_name = 'StreamProcessor';
export default FactoryMaker.getClassFactory(StreamProcessor);
22 changes: 16 additions & 6 deletions src/streaming/controllers/PlaybackController.js
Expand Up @@ -230,26 +230,27 @@ function PlaybackController() {
* @returns {number} the computed live delay in seconds
* @memberof PlaybackController#
*/
function computeLiveDelay(fragmentDuration, dvrWindowSize, minBufferTime = NaN) {
function computeAndSetLiveDelay(fragmentDuration, dvrWindowSize, minBufferTime) {
let delay,
ret,
startTime;
const END_OF_PLAYLIST_PADDING = 10;
const MIN_BUFFER_TIME_FACTOR = 4;
const FRAGMENT_DURATION_FACTOR = 4;
const adjustedFragmentDuration = !isNaN(fragmentDuration) && isFinite(fragmentDuration) ? fragmentDuration : NaN;

let suggestedPresentationDelay = adapter.getSuggestedPresentationDelay();

if (settings.get().streaming.lowLatencyEnabled) {
delay = 0;
} else if (mediaPlayerModel.getLiveDelay()) {
delay = mediaPlayerModel.getLiveDelay(); // If set by user, this value takes precedence
} else if (settings.get().streaming.liveDelayFragmentCount !== null && !isNaN(settings.get().streaming.liveDelayFragmentCount) && !isNaN(fragmentDuration)) {
delay = fragmentDuration * settings.get().streaming.liveDelayFragmentCount;
} else if (settings.get().streaming.liveDelayFragmentCount !== null && !isNaN(settings.get().streaming.liveDelayFragmentCount) && !isNaN(adjustedFragmentDuration)) {
delay = adjustedFragmentDuration * settings.get().streaming.liveDelayFragmentCount;
} else if (settings.get().streaming.useSuggestedPresentationDelay === true && suggestedPresentationDelay !== null && !isNaN(suggestedPresentationDelay) && suggestedPresentationDelay > 0) {
delay = suggestedPresentationDelay;
} else if (!isNaN(fragmentDuration)) {
delay = fragmentDuration * FRAGMENT_DURATION_FACTOR;
} else if (!isNaN(adjustedFragmentDuration)) {
delay = adjustedFragmentDuration * FRAGMENT_DURATION_FACTOR;
} else {
delay = !isNaN(minBufferTime) ? minBufferTime * MIN_BUFFER_TIME_FACTOR : streamInfo.manifestInfo.minBufferTime * MIN_BUFFER_TIME_FACTOR;
}
Expand Down Expand Up @@ -277,6 +278,14 @@ function PlaybackController() {
return liveDelay;
}

/**
 * Sets the current live delay.
 * @param {number} value - new live delay in seconds
 * @param {boolean} [useMaxValue=false] - when true, only ever increases the stored delay
 */
function setLiveDelay(value, useMaxValue = false) {
    // In "max" mode keep the larger existing delay; otherwise overwrite unconditionally.
    // Note: the negated comparison also assigns when liveDelay is NaN, matching the original behavior.
    if (!useMaxValue || !(value < liveDelay)) {
        liveDelay = value;
    }
}

function getCurrentLiveLatency() {
if (!isDynamic || isNaN(availabilityStartTime)) {
return NaN;
Expand Down Expand Up @@ -876,8 +885,9 @@ function PlaybackController() {
getStreamController: getStreamController,
setLiveStartTime: setLiveStartTime,
getLiveStartTime: getLiveStartTime,
computeLiveDelay: computeLiveDelay,
computeAndSetLiveDelay: computeAndSetLiveDelay,
getLiveDelay: getLiveDelay,
setLiveDelay: setLiveDelay,
getCurrentLiveLatency: getCurrentLiveLatency,
play: play,
isPaused: isPaused,
Expand Down
78 changes: 73 additions & 5 deletions src/streaming/controllers/StreamController.js
Expand Up @@ -660,11 +660,7 @@ function StreamController() {
// For multiperiod streams we should avoid a switch of streams after the seek to the live edge. So we do a calculation of the expected seek time to find the right stream object.
if (!initialStream && adapter.getIsDynamic() && streams.length) {
logger.debug('Dynamic multi-period stream: Trying to find the correct starting period');
const manifestInfo = adapter.getStreamsInfo(undefined, 1)[0].manifestInfo;
const liveEdge = timelineConverter.calcPresentationTimeFromWallTime(new Date(), adapter.getRegularPeriods()[0]);
const targetDelay = playbackController.computeLiveDelay(NaN, manifestInfo.DVRWindowSize, manifestInfo.minBufferTime);
const targetTime = liveEdge - targetDelay;
initialStream = getStreamForTime(targetTime);
initialStream = getInitialStream();
}
switchStream(null, initialStream !== null ? initialStream : streams[0], NaN);
}
Expand All @@ -678,6 +674,78 @@ function StreamController() {
}
}

/**
 * Determines the starting stream (period) for dynamic multiperiod streams by
 * computing the live edge, deriving the live delay, and mapping the resulting
 * target time to a stream object.
 * @returns {object|null} the stream covering the target live time, or null on error
 */
function getInitialStream() {
    try {
        const allStreamInfos = adapter.getStreamsInfo(undefined);
        const manifestInfo = allStreamInfos[0].manifestInfo;
        const firstPeriod = adapter.getRegularPeriods()[0];
        const liveEdge = timelineConverter.calcPresentationTimeFromWallTime(new Date(), firstPeriod);
        const fragmentDuration = getFragmentDurationForLiveDelayCalculation(allStreamInfos, manifestInfo);
        const delay = playbackController.computeAndSetLiveDelay(fragmentDuration, manifestInfo.DVRWindowSize, manifestInfo.minBufferTime);

        return getStreamForTime(liveEdge - delay);
    } catch (e) {
        return null;
    }
}

/**
 * Derives a fragment duration to be used for the live delay calculation.
 * Multiperiod manifests fall back to the manifest-level maxFragmentDuration;
 * single-period manifests use the maximum fragment duration found across the
 * video, audio and fragmented-text representations.
 * @param {Array} streamInfos - stream infos of all periods
 * @param {object} manifestInfo - manifest-level info (provides maxFragmentDuration)
 * @returns {number} fragment duration in seconds, or NaN if it cannot be determined
 */
function getFragmentDurationForLiveDelayCalculation(streamInfos, manifestInfo) {
    try {
        let duration = NaN;

        if (streamInfos && streamInfos.length > 1) {
            // For multiperiod manifests we use the maxFragmentDuration attribute as we do not know the correct starting period
            duration = manifestInfo && !isNaN(manifestInfo.maxFragmentDuration) ? manifestInfo.maxFragmentDuration : NaN;
        } else if (streamInfos && streamInfos.length === 1) {
            // For single period manifests we iterate over all AS and use the maximum segment length
            const streamInfo = streamInfos[0];
            const durations = [];

            [Constants.VIDEO, Constants.AUDIO, Constants.FRAGMENTED_TEXT].forEach((mediaType) => {
                const mediaInfo = adapter.getMediaInfoForType(streamInfo, mediaType);
                if (!mediaInfo) {
                    return;
                }

                const voRepresentations = adapter.getVoRepresentations(mediaInfo);
                if (!voRepresentations) {
                    return;
                }

                voRepresentations.forEach((voRepresentation) => {
                    if (!voRepresentation) {
                        return;
                    }

                    const representation = adapter.convertDataToRepresentationInfo(voRepresentation);
                    if (representation && representation.fragmentDuration && !isNaN(representation.fragmentDuration)) {
                        durations.push(representation.fragmentDuration);
                    }
                });
            });

            // Math.max of an empty list is -Infinity, which the isFinite guard below maps to NaN
            duration = Math.max(...durations);
        }

        return isFinite(duration) ? duration : NaN;
    } catch (e) {
        return NaN;
    }
}

function onTimeSyncCompleted( /*e*/) {
const manifest = manifestModel.getValue();
//TODO check if we can move this to initialize??
Expand Down
2 changes: 1 addition & 1 deletion test/unit/streaming.controllers.PlaybackControllers.js
Expand Up @@ -95,7 +95,7 @@ describe('PlaybackController', function () {
});

// Title updated to match the renamed method (computeLiveDelay -> computeAndSetLiveDelay)
it('should return NaN when getLiveDelay is called after a call to computeAndSetLiveDelay with no parameter', function () {
    expect(playbackController.computeAndSetLiveDelay.bind(playbackController)).not.to.throw();
    expect(playbackController.getLiveDelay()).to.be.NaN; // jshint ignore:line
});

Expand Down

0 comments on commit df3e5fb

Please sign in to comment.