Fix Safari video playback, and fragment timing #2902

Merged
merged 2 commits on Jul 20, 2020
20 changes: 19 additions & 1 deletion demo/chart/timeline-chart.ts
@@ -132,13 +132,26 @@ export class TimelineChart {
this.hidden = true;
}

updateLevels (levels: any[]) {
updateLevels (levels: any[], levelSwitched) {
const { labels, datasets } = this.chart.data;
const { loadLevel, nextLoadLevel, nextAutoLevel } = self.hls;
const currentLevel = levelSwitched !== undefined ? levelSwitched : self.hls.currentLevel;
levels.forEach((level, i) => {
labels.push(getLevelName(level, level.level || level.id || i));
let borderColor = null;
if (currentLevel === i) {
borderColor = 'rgba(32, 32, 240, 1.0)';
} else if (loadLevel === i) {
borderColor = 'rgba(255, 128, 0, 1.0)';
} else if (nextLoadLevel === i) {
borderColor = 'rgba(200, 200, 64, 1.0)';
} else if (nextAutoLevel === i) {
borderColor = 'rgba(160, 0, 160, 1.0)';
}
datasets.push(datasetWithDefaults({
url: Array.isArray(level.url) ? level.url[0] : level.url,
trackType: 'level',
borderColor,
level: level.level
}));
if (level.details) {
@@ -150,11 +163,13 @@ export class TimelineChart {

updateAudioTracks (audioTracks: any[]) {
const { labels, datasets } = this.chart.data;
const { audioTrack } = self.hls;
audioTracks.forEach((track, i) => {
labels.push(getAudioTrackName(track, i));
datasets.push(datasetWithDefaults({
url: Array.isArray(track.url) ? track.url[0] : track.url,
trackType: 'audioTrack',
borderColor: audioTrack === i ? 'rgba(32, 32, 240, 1.0)' : null,
audioTrack: i
}));
if (track.details) {
@@ -166,11 +181,13 @@ export class TimelineChart {

updateSubtitleTracks (subtitles: any[]) {
const { labels, datasets } = this.chart.data;
const { subtitleTrack } = self.hls;
subtitles.forEach((track, i) => {
labels.push(getSubtitlesName(track, i));
datasets.push(datasetWithDefaults({
url: Array.isArray(track.url) ? track.url[0] : track.url,
trackType: 'subtitleTrack',
borderColor: subtitleTrack === i ? 'rgba(32, 32, 240, 1.0)' : null,
subtitleTrack: i
}));
if (track.details) {
@@ -346,6 +363,7 @@ export class TimelineChart {
categoryPercentage: 0.5,
url: '',
trackType: 'textTrack',
borderColor: textTrack.mode !== 'hidden' ? 'rgba(32, 32, 240, 1.0)' : null,
textTrack: i
}));
this.cuesChangeHandler = this.cuesChangeHandler || ((e) => this.updateTextTrackCues(e.currentTarget));
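Note on the chart change: `updateLevels` now takes the switched level as a second argument and colors the border of the matching dataset, so the demo timeline highlights the current, loading, next-load and next-auto levels. A minimal sketch of that selection logic, pulled out into a standalone helper for illustration (the helper name is hypothetical; the colors are the ones used in the diff):

```js
// Hypothetical helper illustrating the border-color selection added to updateLevels().
// Blue marks the current level, orange the loadLevel, yellow the nextLoadLevel,
// and purple the nextAutoLevel; any other row keeps the default (null) border.
function pickLevelBorderColor (i, { currentLevel, loadLevel, nextLoadLevel, nextAutoLevel }) {
  if (currentLevel === i) {
    return 'rgba(32, 32, 240, 1.0)';  // current level: blue
  }
  if (loadLevel === i) {
    return 'rgba(255, 128, 0, 1.0)';  // loadLevel: orange
  }
  if (nextLoadLevel === i) {
    return 'rgba(200, 200, 64, 1.0)'; // nextLoadLevel: yellow
  }
  if (nextAutoLevel === i) {
    return 'rgba(160, 0, 160, 1.0)';  // nextAutoLevel: purple
  }
  return null;
}
```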
32 changes: 31 additions & 1 deletion demo/main.js
@@ -1315,9 +1315,28 @@ function addChartEventListeners (hls) {
chart.removeType('level');
chart.updateLevels(levels);
});
hls.on(Hls.Events.LEVEL_UPDATED, (eventName, { details, level }) => {
hls.on(Hls.Events.LEVEL_SWITCHED, (eventName, { level }) => {
// TODO: mutate level datasets
// Update currentLevel
chart.removeType('level');
chart.updateLevels(hls.levels, level);
}, chart);
hls.on(Hls.Events.LEVEL_LOADING, () => {
// TODO: mutate level datasets
// Update loadLevel
chart.removeType('level');
chart.updateLevels(hls.levels);
}, chart);
hls.on(Hls.Events.FRAG_LOADING, () => {
// TODO: mutate level datasets
// Update loadLevel
chart.removeType('level');
chart.updateLevels(hls.levels);
}, chart);
hls.on(Hls.Events.LEVEL_UPDATED, (eventName, { details }) => {
chart.updateLevelOrTrack(details);
}, chart);

hls.on(Hls.Events.AUDIO_TRACKS_UPDATED, (eventName, { audioTracks }) => {
chart.removeType('audioTrack');
chart.updateAudioTracks(audioTracks);
@@ -1326,6 +1345,17 @@ function addChartEventListeners (hls) {
chart.removeType('subtitleTrack');
chart.updateSubtitleTracks(subtitleTracks);
}, chart);

hls.on(Hls.Events.AUDIO_TRACK_SWITCHED, (eventName) => {
// TODO: mutate level datasets
chart.removeType('audioTrack');
chart.updateAudioTracks(hls.audioTracks);
}, chart);
hls.on(Hls.Events.SUBTITLE_TRACK_SWITCH, (eventName) => {
// TODO: mutate level datasets
chart.removeType('subtitleTrack');
chart.updateSubtitleTracks(hls.subtitleTracks);
}, chart);
hls.on(Hls.Events.AUDIO_TRACK_LOADED, updateLevelOrTrack, chart);
hls.on(Hls.Events.SUBTITLE_TRACK_LOADED, updateLevelOrTrack, chart);
hls.on(Hls.Events.LEVEL_PTS_UPDATED, updateLevelOrTrack, chart);
5 changes: 4 additions & 1 deletion src/controller/level-helper.js
@@ -46,7 +46,7 @@ export function updatePTS (fragments, fromIdx, toIdx) {
} else {
// we dont know startPTS[toIdx]
if (toIdx > fromIdx) {
fragTo.start = fragFrom.start + fragFrom.duration;
fragTo.start = fragFrom.start + (fragFrom.minEndPTS ? fragFrom.minEndPTS - fragFrom.start : fragFrom.duration);
} else {
fragTo.start = Math.max(fragFrom.start - fragTo.duration, 0);
}
@@ -56,6 +56,7 @@
export function updateFragPTSDTS (details, frag, startPTS, endPTS, startDTS, endDTS) {
// update frag PTS/DTS
let maxStartPTS = startPTS;
let minEndPTS = endPTS;
if (Number.isFinite(frag.startPTS)) {
// delta PTS between audio and video
let deltaPTS = Math.abs(frag.startPTS - startPTS);
@@ -67,6 +68,7 @@ export function updateFragPTSDTS (details, frag, startPTS, endPTS, startDTS, endDTS) {

maxStartPTS = Math.max(startPTS, frag.startPTS);
startPTS = Math.min(startPTS, frag.startPTS);
minEndPTS = Math.min(endPTS, frag.endPTS);
endPTS = Math.max(endPTS, frag.endPTS);
startDTS = Math.min(startDTS, frag.startDTS);
endDTS = Math.max(endDTS, frag.endDTS);
@@ -76,6 +78,7 @@ export function updateFragPTSDTS (details, frag, startPTS, endPTS, startDTS, endDTS) {
frag.start = frag.startPTS = startPTS;
frag.maxStartPTS = maxStartPTS;
frag.endPTS = endPTS;
frag.minEndPTS = minEndPTS;
frag.startDTS = startDTS;
frag.endDTS = endDTS;
frag.duration = endPTS - startPTS;
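The timing fix: `updateFragPTSDTS` now records `minEndPTS` (the smaller of the audio and video end PTS), and `updatePTS` uses it, when available, to derive the start of the following fragment instead of `start + duration`, since `duration` is based on the larger end PTS. A worked example with illustrative numbers:

```js
// Illustrative numbers only: fragment N starts at 10.0 s, its video track ends at
// 16.02 s and its audio track ends at 15.98 s, so duration = 6.02 and minEndPTS = 15.98.
const fragFrom = {
  start: 10.0,
  duration: 6.02,   // derived from the larger end PTS (16.02 - 10.0)
  minEndPTS: 15.98  // the smaller of the audio/video end PTS
};

// Old behaviour: next fragment starts at 10.0 + 6.02 = 16.02, past the shorter track.
// New behaviour: anchor the next fragment at the smaller end PTS.
const fragToStart = fragFrom.start +
  (fragFrom.minEndPTS ? fragFrom.minEndPTS - fragFrom.start : fragFrom.duration);
console.log(fragToStart); // 15.98
```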
13 changes: 7 additions & 6 deletions src/controller/stream-controller.js
@@ -617,8 +617,8 @@ class StreamController extends BaseStreamController {
const media = this.media;
// ensure that media is defined and that metadata are available (to retrieve currentTime)
if (media && media.readyState) {
let fetchdelay, fragPlayingCurrent, nextBufferedFrag;
fragPlayingCurrent = this.getBufferedFrag(media.currentTime);
let fetchdelay;
const fragPlayingCurrent = this.getBufferedFrag(media.currentTime);
if (fragPlayingCurrent && fragPlayingCurrent.startPTS > 1) {
// flush buffer preceding current fragment (flush until current fragment start offset)
// minus 1s to avoid video freezing, that could happen if we flush keyframe of current video ...
@@ -637,10 +637,10 @@
}
// logger.log('fetchdelay:'+fetchdelay);
// find buffer range that will be reached once new fragment will be fetched
nextBufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay);
if (nextBufferedFrag) {
const bufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay);
if (bufferedFrag) {
// we can flush buffer range following this one without stalling playback
nextBufferedFrag = this.followingBufferedFrag(nextBufferedFrag);
const nextBufferedFrag = this.followingBufferedFrag(bufferedFrag);
if (nextBufferedFrag) {
// if we are here, we can also cancel any loading/demuxing in progress, as they are useless
let fragCurrent = this.fragCurrent;
@@ -652,7 +652,8 @@
// start flush position is the start PTS of next buffered frag.
// we use frag.maxStartPTS which is max(audio startPTS, video startPTS).
// in case there is a small PTS Delta between audio and video, using maxStartPTS avoids flushing last samples from current fragment
this.flushMainBuffer(nextBufferedFrag.maxStartPTS, Number.POSITIVE_INFINITY);
const startPts = Math.max(bufferedFrag.endPTS, nextBufferedFrag.maxStartPTS + Math.min(this.config.maxFragLookUpTolerance, nextBufferedFrag.duration));
this.flushMainBuffer(startPts, Number.POSITIVE_INFINITY);
}
}
}
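The Safari fix: on an immediate quality switch, the main buffer is no longer flushed from `nextBufferedFrag.maxStartPTS` alone; the flush start is clamped so it never falls before the end of the fragment playback will reach, plus a small look-up tolerance into the following fragment. A sketch of the computation with illustrative numbers (0.25 is hls.js's default `maxFragLookUpTolerance`):

```js
// Illustrative numbers only.
const bufferedFrag = { endPTS: 24.0 };                        // frag reached after the fetch delay
const nextBufferedFrag = { maxStartPTS: 23.96, duration: 6 }; // following buffered frag
const maxFragLookUpTolerance = 0.25;                          // hls.js default config value

// Old: flush from nextBufferedFrag.maxStartPTS (23.96), which can remove samples
// Safari still needs from the fragment that will still be played back.
// New: never start the flush before the buffered fragment's end, and keep a small
// tolerance inside the next fragment.
const startPts = Math.max(
  bufferedFrag.endPTS,
  nextBufferedFrag.maxStartPTS + Math.min(maxFragLookUpTolerance, nextBufferedFrag.duration)
);
console.log(startPts); // 24.21
```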