Skip to content

Commit

Permalink
Merge pull request #3859 from elv-peter/v1.0.2/bugfix/multi-ext-x-map-init
Browse files Browse the repository at this point in the history

Add support for multiple EXT-X-MAP tags (updated)
  • Loading branch information
robwalch committed May 17, 2021
2 parents aa9bac1 + 18973bc commit b34e8b8
Show file tree
Hide file tree
Showing 10 changed files with 208 additions and 165 deletions.
4 changes: 2 additions & 2 deletions api-extractor/report/hls.js.api.md
Expand Up @@ -715,6 +715,8 @@ export class Fragment extends BaseSegment {
// (undocumented)
endPTS?: number;
// (undocumented)
initSegment: Fragment | null;
// (undocumented)
level: number;
// (undocumented)
levelkey?: LevelKey;
Expand Down Expand Up @@ -1344,8 +1346,6 @@ export class LevelDetails {
// (undocumented)
holdBack: number;
// (undocumented)
initSegment: Fragment | null;
// (undocumented)
get lastPartIndex(): number;
// (undocumented)
get lastPartSn(): number;
Expand Down
101 changes: 47 additions & 54 deletions src/controller/audio-stream-controller.ts
Expand Up @@ -285,66 +285,59 @@ class AudioStreamController
return;
}

let frag = trackDetails.initSegment;
let targetBufferTime = 0;
if (!frag || frag.data) {
const mediaBuffer = this.mediaBuffer ? this.mediaBuffer : this.media;
const videoBuffer = this.videoBuffer ? this.videoBuffer : this.media;
const maxBufferHole =
pos < config.maxBufferHole
? Math.max(MAX_START_GAP_JUMP, config.maxBufferHole)
: config.maxBufferHole;
const bufferInfo = BufferHelper.bufferInfo(
mediaBuffer,
pos,
maxBufferHole
);
const mainBufferInfo = BufferHelper.bufferInfo(
videoBuffer,
pos,
maxBufferHole
);
const bufferLen = bufferInfo.len;
const maxConfigBuffer = Math.min(
config.maxBufferLength,
config.maxMaxBufferLength
);
const maxBufLen = Math.max(maxConfigBuffer, mainBufferInfo.len);
const audioSwitch = this.audioSwitch;
const mediaBuffer = this.mediaBuffer ? this.mediaBuffer : this.media;
const videoBuffer = this.videoBuffer ? this.videoBuffer : this.media;
const maxBufferHole =
pos < config.maxBufferHole
? Math.max(MAX_START_GAP_JUMP, config.maxBufferHole)
: config.maxBufferHole;
const bufferInfo = BufferHelper.bufferInfo(mediaBuffer, pos, maxBufferHole);
const mainBufferInfo = BufferHelper.bufferInfo(
videoBuffer,
pos,
maxBufferHole
);
const bufferLen = bufferInfo.len;
const maxConfigBuffer = Math.min(
config.maxBufferLength,
config.maxMaxBufferLength
);
const maxBufLen = Math.max(maxConfigBuffer, mainBufferInfo.len);
const audioSwitch = this.audioSwitch;

// if buffer length is less than maxBufLen try to load a new fragment
if (bufferLen >= maxBufLen && !audioSwitch) {
return;
}
// if buffer length is less than maxBufLen try to load a new fragment
if (bufferLen >= maxBufLen && !audioSwitch) {
return;
}

if (!audioSwitch && this._streamEnded(bufferInfo, trackDetails)) {
hls.trigger(Events.BUFFER_EOS, { type: 'audio' });
this.state = State.ENDED;
return;
}
if (!audioSwitch && this._streamEnded(bufferInfo, trackDetails)) {
hls.trigger(Events.BUFFER_EOS, { type: 'audio' });
this.state = State.ENDED;
return;
}

const fragments = trackDetails.fragments;
const start = fragments[0].start;
targetBufferTime = bufferInfo.end;

if (audioSwitch) {
targetBufferTime = pos;
// if currentTime (pos) is less than alt audio playlist start time, it means that alt audio is ahead of currentTime
if (trackDetails.PTSKnown && pos < start) {
// if everything is buffered from pos to start or if audio buffer upfront, let's seek to start
if (bufferInfo.end > start || bufferInfo.nextStart) {
this.log(
'Alt audio track ahead of main track, seek to start of alt audio track'
);
media.currentTime = start + 0.05;
}
const fragments = trackDetails.fragments;
const start = fragments[0].start;
targetBufferTime = bufferInfo.end;

if (audioSwitch) {
targetBufferTime = pos;
// if currentTime (pos) is less than alt audio playlist start time, it means that alt audio is ahead of currentTime
if (trackDetails.PTSKnown && pos < start) {
// if everything is buffered from pos to start or if audio buffer upfront, let's seek to start
if (bufferInfo.end > start || bufferInfo.nextStart) {
this.log(
'Alt audio track ahead of main track, seek to start of alt audio track'
);
media.currentTime = start + 0.05;
}
}
}

frag = this.getNextFragment(targetBufferTime, trackDetails);
if (!frag) {
return;
}
const frag = this.getNextFragment(targetBufferTime, trackDetails);
if (!frag) {
return;
}

if (frag.decryptdata?.keyFormat === 'identity' && !frag.decryptdata?.key) {
Expand Down Expand Up @@ -510,7 +503,7 @@ class AudioStreamController
// Check if we have video initPTS
// If not we need to wait for it
const initPTS = this.initPTS[frag.cc];
const initSegmentData = details.initSegment?.data;
const initSegmentData = frag.initSegment?.data;
if (initPTS !== undefined) {
// this.log(`Transmuxing ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
// time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
Expand Down
21 changes: 7 additions & 14 deletions src/controller/base-stream-controller.ts
Expand Up @@ -435,16 +435,11 @@ export default class BaseStreamController
details,
'Level details are defined when init segment is loaded'
);
const initSegment = details.initSegment as Fragment;
console.assert(
initSegment,
'Fragment initSegment is defined when init segment is loaded'
);

const stats = frag.stats;
this.state = State.IDLE;
this.fragLoadError = 0;
initSegment.data = new Uint8Array(data.payload);
frag.data = new Uint8Array(data.payload);
stats.parsing.start = stats.buffering.start = self.performance.now();
stats.parsing.end = stats.buffering.end = self.performance.now();

Expand Down Expand Up @@ -762,14 +757,7 @@ export default class BaseStreamController
const start = fragments[0].start;
let frag;

// If an initSegment is present, it must be buffered first
if (
levelDetails.initSegment &&
!levelDetails.initSegment.data &&
!this.bitrateTest
) {
frag = levelDetails.initSegment;
} else if (levelDetails.live) {
if (levelDetails.live) {
const initialLiveManifestSize = config.initialLiveManifestSize;
if (fragLen < initialLiveManifestSize) {
this.warn(
Expand Down Expand Up @@ -804,6 +792,11 @@ export default class BaseStreamController
frag = this.getFragmentAtPosition(pos, end, levelDetails);
}

// If an initSegment is present, it must be buffered first
if (frag?.initSegment && !frag?.initSegment.data && !this.bitrateTest) {
frag = frag.initSegment;
}

return frag;
}

Expand Down
24 changes: 18 additions & 6 deletions src/controller/level-helper.ts
Expand Up @@ -165,9 +165,15 @@ export function mergeDetails(
oldDetails: LevelDetails,
newDetails: LevelDetails
): void {
// potentially retrieve cached initsegment
if (newDetails.initSegment && oldDetails.initSegment) {
newDetails.initSegment = oldDetails.initSegment;
// Track the last initSegment processed. Initialize it to the last one on the timeline.
let currentInitSegment: Fragment | null = null;
const oldFragments = oldDetails.fragments;
for (let i = oldFragments.length - 1; i >= 0; i--) {
const oldInit = oldFragments[i].initSegment;
if (oldInit) {
currentInitSegment = oldInit;
break;
}
}

if (oldDetails.fragmentHint) {
Expand Down Expand Up @@ -214,6 +220,15 @@ export function mergeDetails(
newFrag.loader = oldFrag.loader;
newFrag.stats = oldFrag.stats;
newFrag.urlId = oldFrag.urlId;
if (oldFrag.initSegment) {
newFrag.initSegment = oldFrag.initSegment;
currentInitSegment = oldFrag.initSegment;
} else if (
!newFrag.initSegment ||
newFrag.initSegment.relurl == currentInitSegment?.relurl
) {
newFrag.initSegment = currentInitSegment;
}
}
);

Expand All @@ -239,9 +254,6 @@ export function mergeDetails(
}
}
if (newDetails.skippedSegments) {
if (!newDetails.initSegment) {
newDetails.initSegment = oldDetails.initSegment;
}
newDetails.startCC = newDetails.fragments[0].cc;
}

Expand Down
130 changes: 65 additions & 65 deletions src/controller/stream-controller.ts
Expand Up @@ -249,77 +249,77 @@ export default class StreamController
return;
}

let frag = levelDetails.initSegment;
let targetBufferTime = 0;
if (!frag || frag.data || this.bitrateTest) {
// compute max Buffer Length that we could get from this load level, based on level bitrate. don't buffer more than 60 MB and more than 30s
const levelBitrate = levelInfo.maxBitrate;
let maxBufLen;
if (levelBitrate) {
maxBufLen = Math.max(
(8 * config.maxBufferSize) / levelBitrate,
config.maxBufferLength
);
} else {
maxBufLen = config.maxBufferLength;
}
maxBufLen = Math.min(maxBufLen, config.maxMaxBufferLength);

// determine next candidate fragment to be loaded, based on current position and end of buffer position
// ensure up to `config.maxMaxBufferLength` of buffer upfront
const maxBufferHole =
pos < config.maxBufferHole
? Math.max(MAX_START_GAP_JUMP, config.maxBufferHole)
: config.maxBufferHole;
const bufferInfo = BufferHelper.bufferInfo(
this.mediaBuffer ? this.mediaBuffer : media,
pos,
maxBufferHole
// compute max Buffer Length that we could get from this load level, based on level bitrate. don't buffer more than 60 MB and more than 30s
const levelBitrate = levelInfo.maxBitrate;
let maxBufLen;
if (levelBitrate) {
maxBufLen = Math.max(
(8 * config.maxBufferSize) / levelBitrate,
config.maxBufferLength
);
const bufferLen = bufferInfo.len;
// Stay idle if we are still with buffer margins
if (bufferLen >= maxBufLen) {
return;
}

if (this._streamEnded(bufferInfo, levelDetails)) {
const data: BufferEOSData = {};
if (this.altAudio) {
data.type = 'video';
}
} else {
maxBufLen = config.maxBufferLength;
}
maxBufLen = Math.min(maxBufLen, config.maxMaxBufferLength);

// determine next candidate fragment to be loaded, based on current position and end of buffer position
// ensure up to `config.maxMaxBufferLength` of buffer upfront
const maxBufferHole =
pos < config.maxBufferHole
? Math.max(MAX_START_GAP_JUMP, config.maxBufferHole)
: config.maxBufferHole;
const bufferInfo = BufferHelper.bufferInfo(
this.mediaBuffer ? this.mediaBuffer : media,
pos,
maxBufferHole
);
const bufferLen = bufferInfo.len;
// Stay idle if we are still with buffer margins
if (bufferLen >= maxBufLen) {
return;
}

this.hls.trigger(Events.BUFFER_EOS, data);
this.state = State.ENDED;
return;
if (this._streamEnded(bufferInfo, levelDetails)) {
const data: BufferEOSData = {};
if (this.altAudio) {
data.type = 'video';
}

targetBufferTime = bufferInfo.end;
frag = this.getNextFragment(targetBufferTime, levelDetails);
// Avoid backtracking after seeking or switching by loading an earlier segment in streams that could backtrack
if (
this.couldBacktrack &&
!this.fragPrevious &&
frag &&
frag.sn !== 'initSegment'
) {
const fragIdx = frag.sn - levelDetails.startSN;
if (fragIdx > 1) {
frag = levelDetails.fragments[fragIdx - 1];
this.fragmentTracker.removeFragment(frag);
}
}
// Avoid loop loading by using nextLoadPosition set for backtracking
if (
frag &&
this.fragmentTracker.getState(frag) === FragmentState.OK &&
this.nextLoadPosition > targetBufferTime
) {
frag = this.getNextFragment(this.nextLoadPosition, levelDetails);
}
if (!frag) {
return;
this.hls.trigger(Events.BUFFER_EOS, data);
this.state = State.ENDED;
return;
}

targetBufferTime = bufferInfo.end;
let frag = this.getNextFragment(targetBufferTime, levelDetails);
// Avoid backtracking after seeking or switching by loading an earlier segment in streams that could backtrack
if (
this.couldBacktrack &&
!this.fragPrevious &&
frag &&
frag.sn !== 'initSegment'
) {
const fragIdx = frag.sn - levelDetails.startSN;
if (fragIdx > 1) {
frag = levelDetails.fragments[fragIdx - 1];
this.fragmentTracker.removeFragment(frag);
}
}
// Avoid loop loading by using nextLoadPosition set for backtracking
if (
frag &&
this.fragmentTracker.getState(frag) === FragmentState.OK &&
this.nextLoadPosition > targetBufferTime
) {
frag = this.getNextFragment(this.nextLoadPosition, levelDetails);
}
if (!frag) {
return;
}
if (frag.initSegment && !frag.initSegment.data && !this.bitrateTest) {
frag = frag.initSegment;
}

// We want to load the key if we're dealing with an identity key, because we will decrypt
// this content using the key we fetch. Other keys will be handled by the DRM CDM via EME.
Expand Down Expand Up @@ -689,7 +689,7 @@ export default class StreamController

// time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
const accurateTimeOffset = details.PTSKnown || !details.live;
const initSegmentData = details.initSegment?.data;
const initSegmentData = frag.initSegment?.data;
const audioCodec = this._getAudioCodec(currentLevel);

// transmux the MPEG-TS data to ISO-BMFF segments
Expand Down
2 changes: 2 additions & 0 deletions src/loader/fragment.ts
Expand Up @@ -139,6 +139,8 @@ export class Fragment extends BaseSegment {
public bitrateTest: boolean = false;
// #EXTINF segment title
public title: string | null = null;
// The Media Initialization Section for this segment
public initSegment: Fragment | null = null;

constructor(type: PlaylistLevelType, baseurl: string) {
super(baseurl);
Expand Down
1 change: 0 additions & 1 deletion src/loader/level-details.ts
Expand Up @@ -13,7 +13,6 @@ export class LevelDetails {
public fragments: Fragment[];
public fragmentHint?: Fragment;
public partList: Part[] | null = null;
public initSegment: Fragment | null = null;
public live: boolean = true;
public ageHeader: number = 0;
public advancedDateTime?: number;
Expand Down

0 comments on commit b34e8b8

Please sign in to comment.