Skip to content

Commit

Permalink
Improve DebugTraceUtil logging
Browse files · Browse the repository at this point in the history
PiperOrigin-RevId: 554872827
  • Loading branch information
claincly authored and tianyif committed Aug 10, 2023
1 parent ef54364 commit e8a18e2
Show file tree
Hide file tree
Showing 7 changed files with 84 additions and 37 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -195,6 +195,10 @@ private void maybeQueueToShaderProgram() throws VideoFrameProcessingException {
downstreamShaderProgramCapacity--;
shaderProgram.queueInputFrame(
glObjectsProvider, checkNotNull(currentGlTextureInfo), round(currentPresentationTimeUs));
DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_VFP_QUEUE_BITMAP,
(long) currentPresentationTimeUs,
/* extra= */ currentBitmapInfo.frameInfo.width + "x" + currentBitmapInfo.frameInfo.height);
currentPresentationTimeUs += currentBitmapInfo.frameDurationUs;

if (framesToQueueForCurrentBitmap == 0) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,23 +48,27 @@ public final class DebugTraceUtil {
@Retention(RetentionPolicy.SOURCE)
@StringDef({
EVENT_VIDEO_INPUT_FORMAT,
EVENT_MUXER_INPUT_AUDIO,
EVENT_MUXER_INPUT_VIDEO,
EVENT_DECODER_DECODED_FRAME,
EVENT_VFP_RENDERED_TO_INPUT,
EVENT_VFP_FRAME_DEQUEUED,
EVENT_VFP_RENDERED_TO_OUTPUT,
EVENT_VFP_REGISTER_NEW_INPUT_STREAM,
EVENT_VFP_SURFACE_TEXTURE_INPUT,
EVENT_VFP_QUEUE_FRAME,
EVENT_VFP_QUEUE_BITMAP,
EVENT_VFP_QUEUE_TEXTURE,
EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE,
EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM,
EVENT_ENCODER_ENCODED_FRAME,
EVENT_MUXER_CAN_WRITE_SAMPLE_VIDEO,
EVENT_MUXER_WRITE_SAMPLE_VIDEO,
EVENT_MUXER_CAN_WRITE_SAMPLE_AUDIO,
EVENT_ENCODER_ENCODED_FRAME,
EVENT_MUXER_WRITE_SAMPLE_AUDIO,
EVENT_DECODER_RECEIVE_EOS,
EVENT_VFP_RECEIVE_DECODER_EOS,
EVENT_EXTERNAL_INPUT_MANAGER_SIGNAL_EOS,
EVENT_DECODER_SIGNAL_EOS,
EVENT_VFP_RECEIVE_END_OF_INPUT,
EVENT_EXTERNAL_TEXTURE_MANAGER_SIGNAL_EOS,
EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS,
EVENT_TEX_ID_TEXTURE_MANAGER_SIGNAL_EOS,
EVENT_VFP_SIGNAL_EOS,
EVENT_VFP_SIGNAL_ENDED,
EVENT_ENCODER_RECEIVE_EOS,
EVENT_DECODER_SIGNAL_EOS,
EVENT_MUXER_TRACK_ENDED_AUDIO,
EVENT_MUXER_TRACK_ENDED_VIDEO
})
Expand All @@ -74,24 +78,28 @@ public final class DebugTraceUtil {

public static final String EVENT_VIDEO_INPUT_FORMAT = "VideoInputFormat";
public static final String EVENT_DECODER_DECODED_FRAME = "Decoder-DecodedFrame";
public static final String EVENT_VFP_RENDERED_TO_INPUT = "VFP-RenderedToInput";
public static final String EVENT_VFP_FRAME_DEQUEUED = "VFP-FrameDequeued";
public static final String EVENT_VFP_RENDERED_TO_OUTPUT = "VFP-RenderedToOutput";
public static final String EVENT_VFP_REGISTER_NEW_INPUT_STREAM = "VFP-RegisterNewInputStream";
public static final String EVENT_VFP_SURFACE_TEXTURE_INPUT = "VFP-SurfaceTextureInput";
public static final String EVENT_VFP_QUEUE_FRAME = "VFP-QueueFrame";
public static final String EVENT_VFP_QUEUE_BITMAP = "VFP-QueueBitmap";
public static final String EVENT_VFP_QUEUE_TEXTURE = "VFP-QueueTexture";
public static final String EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE = "VFP-RenderedToOutputSurface";
public static final String EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM = "VFP-FinishOneInputStream";
public static final String EVENT_ENCODER_ENCODED_FRAME = "Encoder-EncodedFrame";
public static final String EVENT_MUXER_CAN_WRITE_SAMPLE_VIDEO = "Muxer-CanWriteSample_Video";
public static final String EVENT_MUXER_INPUT_VIDEO = "Muxer-Input_Video";
public static final String EVENT_MUXER_WRITE_SAMPLE_VIDEO = "Muxer-WriteSample_Video";
public static final String EVENT_MUXER_CAN_WRITE_SAMPLE_AUDIO = "Muxer-CanWriteSample_Audio";
public static final String EVENT_MUXER_INPUT_AUDIO = "Muxer-Input_Audio";
public static final String EVENT_MUXER_WRITE_SAMPLE_AUDIO = "Muxer-WriteSample_Audio";
public static final String EVENT_DECODER_RECEIVE_EOS = "Decoder-ReceiveEOS";
public static final String EVENT_DECODER_SIGNAL_EOS = "Decoder-SignalEOS";
public static final String EVENT_VFP_RECEIVE_DECODER_EOS = "VFP-ReceiveDecoderEOS";
public static final String EVENT_EXTERNAL_INPUT_MANAGER_SIGNAL_EOS =
"ExternalInputManager-SignalEOS";
public static final String EVENT_VFP_RECEIVE_END_OF_INPUT = "VFP-ReceiveEndOfAllInput";
public static final String EVENT_EXTERNAL_TEXTURE_MANAGER_SIGNAL_EOS =
"ExternalTextureManager-SignalEOS";
public static final String EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS =
"BitmapTextureManager-SignalEOS";
public static final String EVENT_TEX_ID_TEXTURE_MANAGER_SIGNAL_EOS =
"TexIdTextureManager-SignalEOS";
public static final String EVENT_VFP_SIGNAL_EOS = "VFP-SignalEOS";
public static final String EVENT_VFP_SIGNAL_ENDED = "VFP-SignalEnded";
public static final String EVENT_ENCODER_RECEIVE_EOS = "Encoder-ReceiveEOS";
public static final String EVENT_MUXER_TRACK_ENDED_AUDIO = "Muxer-TrackEnded_Audio";
public static final String EVENT_MUXER_TRACK_ENDED_VIDEO = "Muxer-TrackEnded_Video";
Expand All @@ -101,21 +109,25 @@ public final class DebugTraceUtil {
ImmutableList.of(
EVENT_VIDEO_INPUT_FORMAT,
EVENT_DECODER_DECODED_FRAME,
EVENT_VFP_RENDERED_TO_INPUT,
EVENT_VFP_FRAME_DEQUEUED,
EVENT_VFP_RENDERED_TO_OUTPUT,
EVENT_VFP_REGISTER_NEW_INPUT_STREAM,
EVENT_VFP_SURFACE_TEXTURE_INPUT,
EVENT_VFP_QUEUE_FRAME,
EVENT_VFP_QUEUE_BITMAP,
EVENT_VFP_QUEUE_TEXTURE,
EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE,
EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM,
EVENT_ENCODER_ENCODED_FRAME,
EVENT_MUXER_CAN_WRITE_SAMPLE_VIDEO,
EVENT_MUXER_INPUT_VIDEO,
EVENT_MUXER_WRITE_SAMPLE_VIDEO,
EVENT_MUXER_CAN_WRITE_SAMPLE_AUDIO,
EVENT_MUXER_INPUT_AUDIO,
EVENT_MUXER_WRITE_SAMPLE_AUDIO,
EVENT_DECODER_RECEIVE_EOS,
EVENT_DECODER_SIGNAL_EOS,
EVENT_VFP_RECEIVE_DECODER_EOS,
EVENT_EXTERNAL_INPUT_MANAGER_SIGNAL_EOS,
EVENT_VFP_RECEIVE_END_OF_INPUT,
EVENT_EXTERNAL_TEXTURE_MANAGER_SIGNAL_EOS,
EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS,
EVENT_TEX_ID_TEXTURE_MANAGER_SIGNAL_EOS,
EVENT_VFP_SIGNAL_EOS,
EVENT_VFP_SIGNAL_ENDED,
EVENT_ENCODER_RECEIVE_EOS,
EVENT_MUXER_TRACK_ENDED_AUDIO,
EVENT_MUXER_TRACK_ENDED_VIDEO);
Expand Down Expand Up @@ -160,7 +172,7 @@ public static synchronized void logEvent(
*/
public static synchronized void logEvent(
@DebugTraceEvent String eventName, long presentationTimeUs) {
logEvent(eventName, presentationTimeUs, null);
logEvent(eventName, presentationTimeUs, /* extra= */ null);
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,11 @@
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.effect.DebugTraceUtil.EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM;
import static androidx.media3.effect.DebugTraceUtil.EVENT_VFP_RECEIVE_END_OF_INPUT;
import static androidx.media3.effect.DebugTraceUtil.EVENT_VFP_REGISTER_NEW_INPUT_STREAM;
import static androidx.media3.effect.DebugTraceUtil.EVENT_VFP_SIGNAL_ENDED;
import static androidx.media3.effect.DebugTraceUtil.logEvent;
import static com.google.common.collect.Iterables.getFirst;

import android.content.Context;
Expand Down Expand Up @@ -383,10 +388,11 @@ private DefaultVideoFrameProcessor(
}
if (inputEndedAfterThisInputStream) {
listenerExecutor.execute(listener::onEnded);
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_VFP_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
logEvent(EVENT_VFP_SIGNAL_ENDED, C.TIME_END_OF_SOURCE);
}
});
this.intermediateGlShaderPrograms = new ArrayList<>();
logEvent(EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM, C.TIME_END_OF_SOURCE);
}

/** Returns the task executor that runs video frame processing tasks. */
Expand Down Expand Up @@ -448,6 +454,12 @@ public Surface getInputSurface() {
@Override
public void registerInputStream(
@InputType int inputType, List<Effect> effects, FrameInfo frameInfo) {
logEvent(
EVENT_VFP_REGISTER_NEW_INPUT_STREAM,
/* presentationTimeUs= */ frameInfo.offsetToAddUs,
/* extra= */ Util.formatInvariant(
"InputType %s - %dx%d",
getInputTypeString(inputType), frameInfo.width, frameInfo.height));
nextInputFrameInfo = adjustForPixelWidthHeightRatio(frameInfo);
hasRefreshedNextInputFrameInfo = true;
synchronized (lock) {
Expand Down Expand Up @@ -527,7 +539,7 @@ public void renderOutputFrame(long renderTimeNs) {

@Override
public void signalEndOfInput() {
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_VFP_RECEIVE_DECODER_EOS, C.TIME_END_OF_SOURCE);
logEvent(EVENT_VFP_RECEIVE_END_OF_INPUT, C.TIME_END_OF_SOURCE);
checkState(!inputStreamEnded);
inputStreamEnded = true;
inputSwitcher.signalEndOfCurrentInputStream();
Expand Down Expand Up @@ -771,6 +783,19 @@ private static void chainShaderProgramsWithListeners(
}
}

/**
 * Returns a human-readable name for the given input type constant, for use in debug trace logs.
 *
 * @param inputType The {@code @InputType} constant describing how frames are fed in.
 * @return A short descriptive label ({@code "Surface"}, {@code "Bitmap"} or {@code "Texture
 *     ID"}).
 * @throws IllegalArgumentException If {@code inputType} is not a recognized input type.
 */
private static String getInputTypeString(@InputType int inputType) {
  if (inputType == INPUT_TYPE_SURFACE) {
    return "Surface";
  } else if (inputType == INPUT_TYPE_BITMAP) {
    return "Bitmap";
  } else if (inputType == INPUT_TYPE_TEXTURE_ID) {
    return "Texture ID";
  }
  // Unknown constant: surface the raw value so the caller's bug is diagnosable.
  throw new IllegalArgumentException(String.valueOf(inputType));
}

/** Configures the {@link GlShaderProgram} instances for {@code effects}. */
private void configureEffects(List<Effect> effects) throws VideoFrameProcessingException {
if (!intermediateGlShaderPrograms.isEmpty()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,8 @@ public ExternalTextureManager(
unused ->
videoFrameProcessingTaskExecutor.submit(
() -> {
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_VFP_RENDERED_TO_INPUT, C.TIME_UNSET);
DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_VFP_SURFACE_TEXTURE_INPUT, C.TIME_UNSET);
if (numberOfFramesToDropOnBecomingAvailable > 0) {
numberOfFramesToDropOnBecomingAvailable--;
surfaceTexture.updateTexImage();
Expand Down Expand Up @@ -174,7 +175,7 @@ public void onInputFrameProcessed(GlTextureInfo inputTexture) {
currentInputStreamEnded = false;
externalShaderProgram.signalEndOfCurrentInputStream();
DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_EXTERNAL_INPUT_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
DebugTraceUtil.EVENT_EXTERNAL_TEXTURE_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
cancelForceSignalEndOfStreamTimer();
} else {
maybeQueueFrameToExternalShaderProgram();
Expand Down Expand Up @@ -223,7 +224,7 @@ public void signalEndOfCurrentInputStream() {
if (pendingFrames.isEmpty() && currentFrame == null) {
externalShaderProgram.signalEndOfCurrentInputStream();
DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_EXTERNAL_INPUT_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
DebugTraceUtil.EVENT_EXTERNAL_TEXTURE_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
cancelForceSignalEndOfStreamTimer();
} else {
currentInputStreamEnded = true;
Expand Down Expand Up @@ -331,7 +332,7 @@ private void maybeQueueFrameToExternalShaderProgram() {
currentFrame.height),
presentationTimeUs);
checkStateNotNull(pendingFrames.remove());
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_VFP_FRAME_DEQUEUED, presentationTimeUs);
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_VFP_QUEUE_FRAME, presentationTimeUs);
// If the queued frame is the last frame, end of stream will be signaled onInputFrameProcessed.
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -375,7 +375,8 @@ private synchronized void renderFrameToOutputSurface(
? System.nanoTime()
: renderTimeNs);
EGL14.eglSwapBuffers(eglDisplay, outputEglSurface);
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_VFP_RENDERED_TO_OUTPUT, presentationTimeUs);
DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE, presentationTimeUs);
}

private void renderFrameToOutputTexture(GlTextureInfo inputTexture, long presentationTimeUs)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,10 @@ public void queueInputTexture(int inputTexId, long presentationTimeUs) {
frameInfo.width,
frameInfo.height);
frameConsumptionManager.queueInputFrame(inputTexture, presentationTimeUs);
DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_VFP_QUEUE_TEXTURE,
presentationTimeUs,
/* extra= */ frameInfo.width + "x" + frameInfo.height);
});
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -245,9 +245,9 @@ public boolean writeSample(
muxer.writeSampleData(
trackInfo.index, data, presentationTimeUs, isKeyFrame ? C.BUFFER_FLAG_KEY_FRAME : 0);
if (trackType == C.TRACK_TYPE_VIDEO) {
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_MUXER_INPUT_VIDEO, presentationTimeUs);
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_MUXER_WRITE_SAMPLE_VIDEO, presentationTimeUs);
} else if (trackType == C.TRACK_TYPE_AUDIO) {
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_MUXER_INPUT_AUDIO, presentationTimeUs);
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_MUXER_WRITE_SAMPLE_AUDIO, presentationTimeUs);
}
previousTrackType = trackType;
return true;
Expand Down

0 comments on commit e8a18e2

Please sign in to comment.