Skip to content

Commit

Permalink
Only use first frame stream start position check at stream transitions
Browse files Browse the repository at this point in the history
We currently only force the first frame if the frame timestamp is
greater than the stream *offset*.

This is wrong for two reasons:
 1. The timestamp and the offset are not comparable and it should be
    the stream start position.
 2. The check should only be applied at stream transitions where we
    need to make sure that a new first frame isn't rendered until we
    passed the transition point.

We have to fix both issues together, because fixing just issue (1)
causes seeks to before the start position to no longer render the
frame (and playback will be stuck). A new test covers this case.

We also amend the stream transition test case to actually test what it
promises to test and add a test for prerolling samples at the
beginning, to ensure the first frame is still rendered.

Issue: #291
PiperOrigin-RevId: 552858967
  • Loading branch information
tonihei authored and tianyif committed Aug 7, 2023
1 parent 79e05ad commit 6965677
Show file tree
Hide file tree
Showing 5 changed files with 104 additions and 13 deletions.
3 changes: 3 additions & 0 deletions RELEASENOTES.md
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,9 @@
* Allow `MediaCodecVideoRenderer` to use a custom
`VideoFrameProcessor.Factory`.
* H.265/HEVC: Fix parsing SPS short and long term reference picture info.
* Fix bug where the first frame couldn't be rendered if the audio stream
starts with negative timestamps
([#291](https://github.com/androidx/media/issues/291)).
* Text:
* CEA-608: Change cue truncation logic to only consider visible text.
Previously indent and tab offset were included when limiting the cue
Expand Down
11 changes: 8 additions & 3 deletions libraries/common/src/main/java/androidx/media3/common/C.java
Original file line number Diff line number Diff line change
Expand Up @@ -1510,8 +1510,9 @@ private C() {}
* State of the first frame in a video renderer.
*
* <p>One of {@link #FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED}, {@link
* #FIRST_FRAME_NOT_RENDERED} or {@link #FIRST_FRAME_RENDERED}. The stages are ordered and
* comparable, i.e., a value implies that all stages with higher values are not reached yet.
* #FIRST_FRAME_NOT_RENDERED}, {@link #FIRST_FRAME_NOT_RENDERED_AFTER_STREAM_CHANGE} or {@link
* #FIRST_FRAME_RENDERED}. The stages are ordered and comparable, i.e., a value implies that all
* stages with higher values are not reached yet.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
Expand All @@ -1520,6 +1521,7 @@ private C() {}
@IntDef({
FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED,
FIRST_FRAME_NOT_RENDERED,
FIRST_FRAME_NOT_RENDERED_AFTER_STREAM_CHANGE,
FIRST_FRAME_RENDERED
})
public @interface FirstFrameState {}
Expand All @@ -1533,8 +1535,11 @@ private C() {}
/** The first frame was not rendered after the last reset, output surface or stream change. */
@UnstableApi public static final int FIRST_FRAME_NOT_RENDERED = 1;

/** The first frame was not rendered after the last stream change. */
@UnstableApi public static final int FIRST_FRAME_NOT_RENDERED_AFTER_STREAM_CHANGE = 2;

/** The first frame was rendered. */
@UnstableApi public static final int FIRST_FRAME_RENDERED = 2;
@UnstableApi public static final int FIRST_FRAME_RENDERED = 3;

/**
* @deprecated Use {@link Util#usToMs(long)}.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2102,6 +2102,11 @@ protected final long getOutputStreamOffsetUs() {
return outputStreamInfo.streamOffsetUs;
}

/**
 * Returns the start position, in microseconds, of the stream that is currently being output.
 */
protected final long getOutputStreamStartPositionUs() {
  long startPositionUs = outputStreamInfo.startPositionUs;
  return startPositionUs;
}

private void setOutputStreamInfo(OutputStreamInfo outputStreamInfo) {
this.outputStreamInfo = outputStreamInfo;
if (outputStreamInfo.streamOffsetUs != C.TIME_UNSET) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1341,18 +1341,14 @@ private boolean shouldForceRender(long positionUs, long earlyUs) {
// No force rendering during joining.
return false;
}
if (positionUs < getOutputStreamOffsetUs()) {
// No force rendering if we haven't reached the stream start position.
// TODO: b/160461756 - This is a bug because it compares against the offset and not the start
// position and also should only be applied when transitioning streams, not after every reset.
return false;
}
boolean isStarted = getState() == STATE_STARTED;
switch (firstFrameState) {
case C.FIRST_FRAME_NOT_RENDERED_ONLY_ALLOWED_IF_STARTED:
return isStarted;
case C.FIRST_FRAME_NOT_RENDERED:
return true;
case C.FIRST_FRAME_NOT_RENDERED_AFTER_STREAM_CHANGE:
return positionUs >= getOutputStreamStartPositionUs();
case C.FIRST_FRAME_RENDERED:
long elapsedSinceLastRenderUs = msToUs(getClock().elapsedRealtime()) - lastRenderRealtimeUs;
return isStarted && shouldForceRenderOutputBuffer(earlyUs, elapsedSinceLastRenderUs);
Expand Down Expand Up @@ -1431,7 +1427,7 @@ protected void onProcessedOutputBuffer(long presentationTimeUs) {
@Override
protected void onProcessedStreamChange() {
  super.onProcessedStreamChange();
  // After a stream transition a new first frame is required, but its forced rendering is
  // gated until the playback position reaches the new stream's start position (see the
  // FIRST_FRAME_NOT_RENDERED_AFTER_STREAM_CHANGE case in shouldForceRender), so a frame of
  // the next stream isn't shown before the transition point is passed.
  lowerFirstFrameState(C.FIRST_FRAME_NOT_RENDERED_AFTER_STREAM_CHANGE);
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -509,6 +509,38 @@ public void enable_withoutMayRenderStartOfStream_rendersFirstFrameAfterStart() t
verify(eventListener).onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong());
}

@Test
public void enable_withPrerollSamplesLessThanStartPosition_rendersFirstFrame() throws Exception {
  // The stream starts with preroll samples whose timestamps (-500us) are before the declared
  // start position (2000us). The first frame must still be rendered in this case (regression
  // test for https://github.com/androidx/media/issues/291).
  FakeSampleStream fakeSampleStream =
      new FakeSampleStream(
          new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
          /* mediaSourceEventDispatcher= */ null,
          DrmSessionManager.DRM_UNSUPPORTED,
          new DrmSessionEventListener.EventDispatcher(),
          /* initialFormat= */ VIDEO_H264,
          ImmutableList.of(
              oneByteSample(/* timeUs= */ -500, C.BUFFER_FLAG_KEY_FRAME),
              oneByteSample(/* timeUs= */ 500),
              oneByteSample(/* timeUs= */ 1500)));
  fakeSampleStream.writeData(/* startPositionUs= */ -500);

  mediaCodecVideoRenderer.enable(
      RendererConfiguration.DEFAULT,
      new Format[] {VIDEO_H264},
      fakeSampleStream,
      /* positionUs= */ 2000,
      /* joining= */ false,
      /* mayRenderStartOfStream= */ true,
      /* startPositionUs= */ 2000,
      /* offsetUs= */ 1000);
  // Render repeatedly at position 0 (before the start position of 2000us) to let the renderer
  // process the queued samples.
  for (int i = 0; i < 10; i++) {
    mediaCodecVideoRenderer.render(/* positionUs= */ 0, SystemClock.elapsedRealtime() * 1000);
  }
  shadowOf(testMainLooper).idle();

  // The first frame must have been rendered despite the preroll samples.
  verify(eventListener).onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong());
}

@Test
public void replaceStream_rendersFirstFrameOnlyAfterStartPosition() throws Exception {
ShadowLooper shadowLooper = shadowOf(testMainLooper);
Expand Down Expand Up @@ -545,20 +577,28 @@ public void replaceStream_rendersFirstFrameOnlyAfterStartPosition() throws Excep
mediaCodecVideoRenderer.start();

boolean replacedStream = false;
for (int i = 0; i <= 10; i++) {
// Render to just before the specified start position.
for (int i = 0; i < 10; i++) {
mediaCodecVideoRenderer.render(
/* positionUs= */ i * 10, SystemClock.elapsedRealtime() * 1000);
if (!replacedStream && mediaCodecVideoRenderer.hasReadStreamToEnd()) {
mediaCodecVideoRenderer.replaceStream(
new Format[] {VIDEO_H264},
fakeSampleStream2,
/* startPositionUs= */ 100,
/* offsetUs= */ 100);
/* offsetUs= */ 50);
replacedStream = true;
}
}

// Expect only the first frame of the first stream to have been rendered.
// Assert that only one first frame was rendered so far.
shadowLooper.idle();
verify(eventListener).onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong());

// Render at start position.
mediaCodecVideoRenderer.render(/* positionUs= */ 100, SystemClock.elapsedRealtime() * 1000);

// Assert the new first frame was rendered.
shadowLooper.idle();
verify(eventListener, times(2))
.onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong());
Expand Down Expand Up @@ -622,6 +662,48 @@ public void replaceStream_whenNotStarted_doesNotRenderFirstFrameOfNewStream() th
.onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong());
}

@Test
public void resetPosition_toBeforeOriginalStartPosition_rendersFirstFrame() throws Exception {
  // Regression test: seeking back to before the original start position must still render a
  // first frame and not leave playback stuck (see commit message / issue
  // https://github.com/androidx/media/issues/291).
  ShadowLooper shadowLooper = shadowOf(testMainLooper);
  FakeSampleStream fakeSampleStream =
      new FakeSampleStream(
          new DefaultAllocator(/* trimOnReset= */ true, /* individualAllocationSize= */ 1024),
          /* mediaSourceEventDispatcher= */ null,
          DrmSessionManager.DRM_UNSUPPORTED,
          new DrmSessionEventListener.EventDispatcher(),
          /* initialFormat= */ VIDEO_H264,
          ImmutableList.of(oneByteSample(/* timeUs= */ 1000, C.BUFFER_FLAG_KEY_FRAME)));
  fakeSampleStream.writeData(/* startPositionUs= */ 0);
  mediaCodecVideoRenderer.enable(
      RendererConfiguration.DEFAULT,
      new Format[] {VIDEO_H264},
      fakeSampleStream,
      /* positionUs= */ 1000,
      /* joining= */ false,
      /* mayRenderStartOfStream= */ true,
      /* startPositionUs= */ 1000,
      /* offsetUs= */ 0);
  mediaCodecVideoRenderer.start();
  // Render at the original start position.
  for (int i = 0; i < 10; i++) {
    mediaCodecVideoRenderer.render(/* positionUs= */ 1000, SystemClock.elapsedRealtime() * 1000);
  }

  // Reset the position to before the original start position and render at this position.
  mediaCodecVideoRenderer.resetPosition(500);
  fakeSampleStream.append(
      ImmutableList.of(oneByteSample(/* timeUs= */ 500, C.BUFFER_FLAG_KEY_FRAME)));
  fakeSampleStream.writeData(/* startPositionUs= */ 500);
  for (int i = 0; i < 10; i++) {
    mediaCodecVideoRenderer.render(/* positionUs= */ 500, SystemClock.elapsedRealtime() * 1000);
  }

  // Assert that we rendered the first frame after the reset (one first frame before the reset,
  // a second one after it).
  shadowLooper.idle();
  verify(eventListener, times(2))
      .onRenderedFirstFrame(eq(surface), /* renderTimeMs= */ anyLong());
}

@Test
public void supportsFormat_withDolbyVisionMedia_returnsTrueWhenFallbackToH265orH264Allowed()
throws Exception {
Expand Down

0 comments on commit 6965677

Please sign in to comment.