VideoGraph.java
@@ -16,6 +16,7 @@

package androidx.media3.common;

+import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
import androidx.media3.common.util.UnstableApi;

@@ -73,19 +74,23 @@ interface Listener {
* <p>An underlying processing {@link VideoFrameProcessor} is created every time this method is
* called.
*
+* <p>All inputs must be registered before rendering frames to the underlying
+* {@link #getProcessor(int) VideoFrameProcessor}.
+*
* <p>If the method throws, the caller must call {@link #release}.
*
-* @return The id of the registered input, which can be used to get the underlying {@link
-* VideoFrameProcessor} via {@link #getProcessor(int)}.
+* @param inputIndex The index of the input, which can be used to order the inputs.
+* The index must start from 0.
*/
-int registerInput() throws VideoFrameProcessingException;
+void registerInput(@IntRange(from = 0) int inputIndex) throws VideoFrameProcessingException;

/**
* Returns the {@link VideoFrameProcessor} that handles the processing for an input registered via
-* {@link #registerInput()}. If the {@code inputId} is not {@linkplain #registerInput()
-* registered} before, this method will throw an {@link IllegalStateException}.
+* {@link #registerInput(int)}. If the {@code inputIndex} has not been {@linkplain
+* #registerInput(int) registered}, this method throws an {@link IllegalStateException}.
*/
-VideoFrameProcessor getProcessor(int inputId);
+VideoFrameProcessor getProcessor(int inputIndex);
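
Reviewer note: a minimal caller sketch of the revised contract (not part of this change), assuming an initialized VideoGraph named videoGraph. Indices are caller-chosen, start from 0, and must all be registered before frames are rendered; the same index later retrieves the per-input processor.

// Sketch only: indices are supplied by the caller.
videoGraph.registerInput(/* inputIndex= */ 0); // primary input
videoGraph.registerInput(/* inputIndex= */ 1); // secondary input
VideoFrameProcessor primary = videoGraph.getProcessor(/* inputIndex= */ 0);
// getProcessor(2) would throw IllegalStateException: index 2 was never registered.
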

/**
* Sets the output surface and supporting information.
DefaultVideoCompositor.java
@@ -18,6 +18,7 @@
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
+import static androidx.media3.common.util.Util.contains;
import static java.lang.Math.abs;
import static java.lang.Math.max;

@@ -26,6 +27,7 @@
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
+import android.util.SparseArray;
import androidx.annotation.GuardedBy;
import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
@@ -45,7 +47,6 @@
import com.google.common.collect.Iterables;
import java.io.IOException;
import java.util.ArrayDeque;
-import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
@@ -78,7 +79,8 @@ public final class DefaultVideoCompositor implements VideoCompositor {

private static final String THREAD_NAME = "Effect:DefaultVideoCompositor:GlThread";
private static final String TAG = "DefaultVideoCompositor";
-private static final int PRIMARY_INPUT_ID = 0;
+// TODO: b/338579287: Use the first registered index instead of a constant value.
+private static final int PRIMARY_INPUT_INDEX = 0;

private final VideoCompositor.Listener listener;
private final GlTextureProducer.Listener textureOutputListener;
@@ -88,7 +90,7 @@ public final class DefaultVideoCompositor implements VideoCompositor {
private final VideoFrameProcessingTaskExecutor videoFrameProcessingTaskExecutor;

@GuardedBy("this")
-private final List<InputSource> inputSources;
+private final SparseArray<InputSource> inputSources;

@GuardedBy("this")
private boolean allInputsEnded; // Whether all inputSources have signaled end of input.
@@ -124,7 +126,7 @@ public DefaultVideoCompositor(
this.settings = settings;
this.compositorGlProgram = new CompositorGlProgram(context);

-inputSources = new ArrayList<>();
+inputSources = new SparseArray<>();
outputTexturePool =
new TexturePool(/* useHighPrecisionColorComponents= */ false, textureOutputCapacity);
outputTextureTimestamps = new LongArrayQueue(textureOutputCapacity);
@@ -142,33 +144,35 @@
}

@Override
-public synchronized int registerInputSource() {
-inputSources.add(new InputSource());
-return inputSources.size() - 1;
+public synchronized void registerInputSource(@IntRange(from = 0) int inputIndex) {
+checkState(!contains(inputSources, inputIndex));
+inputSources.put(inputIndex, new InputSource());
}
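
For illustration: List.add handed out sequential ids, while SparseArray makes the caller supply the key, so duplicate registration must now be rejected explicitly. A rough standalone equivalent of the new bookkeeping, assuming media3's Util.contains(sparseArray, key) behaves like indexOfKey(key) >= 0:

import android.util.SparseArray;

final class RegistrationSketch {
  private final SparseArray<Object> inputSources = new SparseArray<>();

  // Mirrors registerInputSource(int): caller-chosen key, duplicates rejected.
  synchronized void register(int inputIndex) {
    if (inputSources.indexOfKey(inputIndex) >= 0) { // Util.contains equivalent
      throw new IllegalStateException("Input already registered: " + inputIndex);
    }
    inputSources.put(inputIndex, new Object());
  }
}
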

@Override
-public synchronized void signalEndOfInputSource(int inputId) {
-inputSources.get(inputId).isInputEnded = true;
+public synchronized void signalEndOfInputSource(int inputIndex) {
+checkState(contains(inputSources, inputIndex));
+inputSources.get(inputIndex).isInputEnded = true;
boolean allInputsEnded = true;
for (int i = 0; i < inputSources.size(); i++) {
-if (!inputSources.get(i).isInputEnded) {
+if (!inputSources.valueAt(i).isInputEnded) {
allInputsEnded = false;
break;
}
}

this.allInputsEnded = allInputsEnded;
-if (inputSources.get(PRIMARY_INPUT_ID).frameInfos.isEmpty()) {
-if (inputId == PRIMARY_INPUT_ID) {
+if (inputSources.get(PRIMARY_INPUT_INDEX).frameInfos.isEmpty()) {
+if (inputIndex == PRIMARY_INPUT_INDEX) {
releaseExcessFramesInAllSecondaryStreams();
}
if (allInputsEnded) {
listener.onEnded();
return;
}
}
-if (inputId != PRIMARY_INPUT_ID && inputSources.get(inputId).frameInfos.size() == 1) {
+if (inputIndex != PRIMARY_INPUT_INDEX
+&& inputSources.get(inputIndex).frameInfos.size() == 1) {
// When a secondary stream ends input, composite if there was only one pending frame in the
// stream.
videoFrameProcessingTaskExecutor.submit(this::maybeComposite);
@@ -177,12 +181,13 @@ public synchronized void signalEndOfInputSource(int inputId) {

@Override
public synchronized void queueInputTexture(
-int inputId,
+int inputIndex,
GlTextureProducer textureProducer,
GlTextureInfo inputTexture,
ColorInfo colorInfo,
long presentationTimeUs) {
-InputSource inputSource = inputSources.get(inputId);
+checkState(contains(inputSources, inputIndex));
+InputSource inputSource = inputSources.get(inputIndex);
checkState(!inputSource.isInputEnded);
checkState(!ColorInfo.isTransferHdr(colorInfo), "HDR input is not supported.");
if (configuredColorInfo == null) {
@@ -196,10 +201,10 @@ public synchronized void queueInputTexture(
textureProducer,
inputTexture,
presentationTimeUs,
-settings.getOverlaySettings(inputId, presentationTimeUs));
+settings.getOverlaySettings(inputIndex, presentationTimeUs));
inputSource.frameInfos.add(inputFrameInfo);

-if (inputId == PRIMARY_INPUT_ID) {
+if (inputIndex == PRIMARY_INPUT_INDEX) {
releaseExcessFramesInAllSecondaryStreams();
} else {
releaseExcessFramesInSecondaryStream(inputSource);
@@ -225,11 +230,11 @@ public void releaseOutputTexture(long presentationTimeUs) {
}

private synchronized void releaseExcessFramesInAllSecondaryStreams() {
-for (int i = 0; i < inputSources.size(); i++) {
-if (i == PRIMARY_INPUT_ID) {
+for (int i = 0; i < inputSources.size(); i++) {
+if (inputSources.keyAt(i) == PRIMARY_INPUT_INDEX) {
continue;
}
-releaseExcessFramesInSecondaryStream(inputSources.get(i));
+releaseExcessFramesInSecondaryStream(inputSources.valueAt(i));
}
}
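
Note the access pattern that recurs throughout this file: SparseArray is iterated by position, where position i maps to key keyAt(i) and value valueAt(i), while get(key) looks up by key. Positions only coincide with keys when the keys are dense from 0, which is why loops compare keyAt(i) rather than the loop counter against PRIMARY_INPUT_INDEX. A small fragment, assuming android.util.SparseArray is imported:

SparseArray<String> sources = new SparseArray<>();
sources.put(0, "primary");
sources.put(5, "secondary"); // keys need not be contiguous
for (int i = 0; i < sources.size(); i++) {
  int key = sources.keyAt(i);        // 0, then 5
  String value = sources.valueAt(i); // same as sources.get(key)
}
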

@@ -241,7 +246,7 @@ private synchronized void releaseExcessFramesInAllSecondaryStreams() {
* began.
*/
private synchronized void releaseExcessFramesInSecondaryStream(InputSource secondaryInputSource) {
-InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
+InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_INDEX);
// If the primary stream output is ended, all secondary frames can be released.
if (primaryInputSource.frameInfos.isEmpty() && primaryInputSource.isInputEnded) {
releaseFrames(
@@ -292,7 +297,7 @@ private synchronized void maybeComposite()
return;
}

-InputFrameInfo primaryInputFrame = framesToComposite.get(PRIMARY_INPUT_ID);
+InputFrameInfo primaryInputFrame = framesToComposite.get(PRIMARY_INPUT_INDEX);

ImmutableList.Builder<Size> inputSizes = new ImmutableList.Builder<>();
for (int i = 0; i < framesToComposite.size(); i++) {
@@ -313,7 +318,7 @@
textureOutputListener.onTextureRendered(
/* textureProducer= */ this, outputTexture, outputPresentationTimestampUs, syncObject);

-InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_ID);
+InputSource primaryInputSource = inputSources.get(PRIMARY_INPUT_INDEX);
releaseFrames(primaryInputSource, /* numberOfFramesToRelease= */ 1);
releaseExcessFramesInAllSecondaryStreams();

@@ -333,18 +338,18 @@ private synchronized ImmutableList<InputFrameInfo> getFramesToComposite() {
if (outputTexturePool.freeTextureCount() == 0) {
return ImmutableList.of();
}
-for (int inputId = 0; inputId < inputSources.size(); inputId++) {
-if (inputSources.get(inputId).frameInfos.isEmpty()) {
+for (int i = 0; i < inputSources.size(); i++) {
+if (inputSources.valueAt(i).frameInfos.isEmpty()) {
return ImmutableList.of();
}
}
ImmutableList.Builder<InputFrameInfo> framesToComposite = new ImmutableList.Builder<>();
InputFrameInfo primaryFrameToComposite =
-inputSources.get(PRIMARY_INPUT_ID).frameInfos.element();
+inputSources.get(PRIMARY_INPUT_INDEX).frameInfos.element();
framesToComposite.add(primaryFrameToComposite);

-for (int inputId = 0; inputId < inputSources.size(); inputId++) {
-if (inputId == PRIMARY_INPUT_ID) {
+for (int i = 0; i < inputSources.size(); i++) {
+if (inputSources.keyAt(i) == PRIMARY_INPUT_INDEX) {
continue;
}
// Select the secondary streams' frame that would be composited next. The frame selected is
Expand All @@ -353,7 +358,7 @@ private synchronized ImmutableList<InputFrameInfo> getFramesToComposite() {
// 2. Two or more frames, and at least one frame has timestamp greater than the target
// timestamp.
// The smaller timestamp is taken if two timestamps have the same distance from the primary.
-InputSource secondaryInputSource = inputSources.get(inputId);
+InputSource secondaryInputSource = inputSources.valueAt(i);
if (secondaryInputSource.frameInfos.size() == 1 && !secondaryInputSource.isInputEnded) {
return ImmutableList.of();
}
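
To make the selection rule above concrete: if the primary frame is at 100us and a secondary stream holds frames at 60us, 90us, and 130us, the distances are 40, 10, and 30, so the 90us frame is composited; on a tie (say 90us and 110us), the smaller timestamp wins. A standalone sketch of the rule, not the compositor's actual code:

// Returns the secondary timestamp closest to the primary; ties go to the
// smaller timestamp.
static long selectClosest(long primaryTimeUs, long[] secondaryTimesUs) {
  long best = secondaryTimesUs[0];
  for (long candidate : secondaryTimesUs) {
    long candidateDistance = Math.abs(candidate - primaryTimeUs);
    long bestDistance = Math.abs(best - primaryTimeUs);
    if (candidateDistance < bestDistance
        || (candidateDistance == bestDistance && candidate < best)) {
      best = candidate;
    }
  }
  return best;
}
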
MultipleInputVideoGraph.java
@@ -33,6 +33,7 @@
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.util.SparseArray;
+import androidx.annotation.IntRange;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.ColorInfo;
@@ -75,7 +76,7 @@ public abstract class MultipleInputVideoGraph implements VideoGraph {
private final Executor listenerExecutor;
private final VideoCompositorSettings videoCompositorSettings;
private final List<Effect> compositionEffects;
-private final List<VideoFrameProcessor> preProcessors;
+private final SparseArray<VideoFrameProcessor> preProcessors;

private final ExecutorService sharedExecutorService;

@@ -114,7 +115,7 @@ protected MultipleInputVideoGraph(
this.compositionEffects = new ArrayList<>(compositionEffects);
this.initialTimestampOffsetUs = initialTimestampOffsetUs;
lastRenderedPresentationTimeUs = C.TIME_UNSET;
-preProcessors = new ArrayList<>();
+preProcessors = new SparseArray<>();
sharedExecutorService = newSingleThreadScheduledExecutor(SHARED_EXECUTOR_NAME);
glObjectsProvider = new SingleContextGlObjectsProvider();
// TODO - b/289986435: Support injecting VideoFrameProcessor.Factory.
@@ -136,7 +137,7 @@ protected MultipleInputVideoGraph(
@Override
public void initialize() throws VideoFrameProcessingException {
checkState(
-preProcessors.isEmpty()
+preProcessors.size() == 0
&& videoCompositor == null
&& compositionVideoFrameProcessor == null
&& !released);
@@ -211,10 +212,10 @@ public void onEnded() {
}

@Override
-public int registerInput() throws VideoFrameProcessingException {
-checkStateNotNull(videoCompositor);
-
-int videoCompositorInputId = videoCompositor.registerInputSource();
+public void registerInput(@IntRange(from = 0) int inputIndex)
+throws VideoFrameProcessingException {
+checkState(!contains(preProcessors, inputIndex));
+checkNotNull(videoCompositor).registerInputSource(inputIndex);
// Creating a new VideoFrameProcessor for the input.
VideoFrameProcessor preProcessor =
videoFrameProcessorFactory
Expand All @@ -223,7 +224,7 @@ public int registerInput() throws VideoFrameProcessingException {
// Texture output to compositor.
(textureProducer, texture, presentationTimeUs, syncObject) ->
queuePreProcessingOutputToCompositor(
-videoCompositorInputId, textureProducer, texture, presentationTimeUs),
+inputIndex, textureProducer, texture, presentationTimeUs),
PRE_COMPOSITOR_TEXTURE_OUTPUT_CAPACITY)
.build()
.create(
@@ -254,17 +255,16 @@ public void onError(VideoFrameProcessingException exception) {

@Override
public void onEnded() {
-onPreProcessingVideoFrameProcessorEnded(videoCompositorInputId);
+onPreProcessingVideoFrameProcessorEnded(inputIndex);
}
});
-preProcessors.add(preProcessor);
-return videoCompositorInputId;
+preProcessors.put(inputIndex, preProcessor);
}

@Override
-public VideoFrameProcessor getProcessor(int inputId) {
-checkState(inputId < preProcessors.size());
-return preProcessors.get(inputId);
+public VideoFrameProcessor getProcessor(int inputIndex) {
+checkState(contains(preProcessors, inputIndex));
+return preProcessors.get(inputIndex);
}

@Override
@@ -285,7 +285,7 @@ public void release() {

// Needs to release the frame processors before their internal executor services are released.
for (int i = 0; i < preProcessors.size(); i++) {
-preProcessors.get(i).release();
+preProcessors.valueAt(i).release();
}
preProcessors.clear();

PreviewingSingleInputVideoGraph.java
@@ -109,6 +109,6 @@ private PreviewingSingleInputVideoGraph(

@Override
public void renderOutputFrame(long renderTimeNs) {
-getProcessor(SINGLE_INPUT_INDEX).renderOutputFrame(renderTimeNs);
+getProcessor(getInputIndex()).renderOutputFrame(renderTimeNs);
}
}
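
With the SINGLE_INPUT_INDEX constant gone, the previewing graph renders through whatever index was registered. This assumes the parent SingleInputVideoGraph records the index passed to registerInput(int) and exposes it via getInputIndex(), as the call above implies:

// Hypothetical caller: the single input may now live at any index.
videoGraph.registerInput(/* inputIndex= */ 2);
videoGraph.renderOutputFrame(renderTimeNs); // resolves getProcessor(2) internally
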