Commit b18fb368 by kimvde Committed by Marc Baechinger

Fix and simplify timestamps handling

Handling of the stream offset and start position was unnecessarily
complex and even incorrect. It was going to be an issue for
concatenation of video and image input.

The stream offset is the offset added before decoding/encoding to
make sure decoding/encoding doesn’t fail in case of negative
timestamps (which rarely occur).
The start position is equal to the stream offset, plus the clipping
start time if the media is clipped.

Before this change:
- Samples were offset by the start position before decoding, and this
offset was removed before muxing.
- The startPosition of the first MediaItem in a sequence was used for
all the MediaItems in this sequence (which is incorrect).
- The stream offset was removed before applying the GL effects and
added back before encoding so that it was not visible to the OpenGL
processing.

After this change:
- The start position is subtracted in the AssetLoader, so that the
downstream components don’t have to deal with the stream offsets and
start positions.
- Decoded samples with negative timestamps are not passed to the
SamplePipelines. The MediaMuxer doesn’t handle negative timestamps
well. If a stream is 10 seconds long and starts at timestamp -2
seconds, the output will only contain the samples corresponding to the
first 8 (10 - 2) seconds. It won’t contain the last 2 seconds of the
stream. It seems acceptable to remove the first 2 seconds instead.

PiperOrigin-RevId: 520916464
parent c82a6fcd
Showing with 94 additions and 119 deletions
...@@ -96,18 +96,10 @@ public interface AssetLoader { ...@@ -96,18 +96,10 @@ public interface AssetLoader {
* AssetLoader} (prior to video slow motion flattening or to decoding). * AssetLoader} (prior to video slow motion flattening or to decoding).
* @param supportedOutputTypes The output {@linkplain SupportedOutputTypes types} supported by * @param supportedOutputTypes The output {@linkplain SupportedOutputTypes types} supported by
* this {@link AssetLoader} for the track added. At least one output type must be supported. * this {@link AssetLoader} for the track added. At least one output type must be supported.
* @param streamStartPositionUs The start position of the stream (offset by {@code
* streamOffsetUs}), in microseconds.
* @param streamOffsetUs The offset that will be added to the timestamps to make sure they are
* non-negative, in microseconds.
* @return Whether the {@link AssetLoader} needs to provide decoded data to the {@link * @return Whether the {@link AssetLoader} needs to provide decoded data to the {@link
* SampleConsumer}. * SampleConsumer}.
*/ */
boolean onTrackAdded( boolean onTrackAdded(Format inputFormat, @SupportedOutputTypes int supportedOutputTypes);
Format inputFormat,
@SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs);
/** /**
* Called when the {@link Format} of samples that will be output by the {@link AssetLoader} is * Called when the {@link Format} of samples that will be output by the {@link AssetLoader} is
......
...@@ -62,7 +62,6 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -62,7 +62,6 @@ import org.checkerframework.dataflow.qual.Pure;
public AudioSamplePipeline( public AudioSamplePipeline(
Format firstAssetLoaderInputFormat, Format firstAssetLoaderInputFormat,
Format firstPipelineInputFormat, Format firstPipelineInputFormat,
long streamOffsetUs,
TransformationRequest transformationRequest, TransformationRequest transformationRequest,
boolean flattenForSlowMotion, boolean flattenForSlowMotion,
ImmutableList<AudioProcessor> audioProcessors, ImmutableList<AudioProcessor> audioProcessors,
...@@ -70,7 +69,7 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -70,7 +69,7 @@ import org.checkerframework.dataflow.qual.Pure;
MuxerWrapper muxerWrapper, MuxerWrapper muxerWrapper,
FallbackListener fallbackListener) FallbackListener fallbackListener)
throws ExportException { throws ExportException {
super(firstAssetLoaderInputFormat, /* streamStartPositionUs= */ streamOffsetUs, muxerWrapper); super(firstAssetLoaderInputFormat, muxerWrapper);
silentAudioGenerator = new SilentAudioGenerator(firstPipelineInputFormat); silentAudioGenerator = new SilentAudioGenerator(firstPipelineInputFormat);
availableInputBuffers = new ConcurrentLinkedDeque<>(); availableInputBuffers = new ConcurrentLinkedDeque<>();
...@@ -141,9 +140,6 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -141,9 +140,6 @@ import org.checkerframework.dataflow.qual.Pure;
transformationRequest, transformationRequest,
requestedEncoderFormat, requestedEncoderFormat,
/* actualFormat= */ encoder.getConfigurationFormat())); /* actualFormat= */ encoder.getConfigurationFormat()));
// Use the same stream offset as the input stream for encoder input buffers.
nextEncoderInputBufferTimeUs = streamOffsetUs;
} }
@Override @Override
......
...@@ -214,12 +214,33 @@ public interface Codec { ...@@ -214,12 +214,33 @@ public interface Codec {
* <p>This should be called after the buffer has been processed. The next output buffer will not * <p>This should be called after the buffer has been processed. The next output buffer will not
* be available until the current output buffer has been released. * be available until the current output buffer has been released.
* *
* <p>Calling this method with {@code render} set to {@code true} is equivalent to calling {@link
* #releaseOutputBuffer(long)} with the presentation timestamp of the {@link
* #getOutputBufferInfo() output buffer info}.
*
* @param render Whether the buffer needs to be rendered to the output {@link Surface}. * @param render Whether the buffer needs to be rendered to the output {@link Surface}.
* @throws ExportException If the underlying decoder or encoder encounters a problem. * @throws ExportException If the underlying decoder or encoder encounters a problem.
*/ */
void releaseOutputBuffer(boolean render) throws ExportException; void releaseOutputBuffer(boolean render) throws ExportException;
/** /**
* Renders and releases the current output buffer.
*
* <p>This method must only be called on video decoders.
*
* <p>This method will first render the buffer to the output surface. The surface will then
* release the buffer back to the {@code Codec} once it is no longer used/displayed.
*
* <p>This should be called after the buffer has been processed. The next output buffer will not
* be available until the current output buffer has been released.
*
* @param renderPresentationTimeUs The presentation timestamp to associate with this buffer, in
* microseconds.
* @throws ExportException If the underlying decoder or encoder encounters a problem.
*/
void releaseOutputBuffer(long renderPresentationTimeUs) throws ExportException;
/**
* Returns whether the {@code Codec}'s output stream has ended, and no more data can be dequeued. * Returns whether the {@code Codec}'s output stream has ended, and no more data can be dequeued.
*/ */
boolean isEnded(); boolean isEnded();
......
...@@ -251,12 +251,21 @@ public final class DefaultCodec implements Codec { ...@@ -251,12 +251,21 @@ public final class DefaultCodec implements Codec {
@Override @Override
public void releaseOutputBuffer(boolean render) throws ExportException { public void releaseOutputBuffer(boolean render) throws ExportException {
releaseOutputBuffer(render, checkStateNotNull(outputBufferInfo).presentationTimeUs);
}
@Override
public void releaseOutputBuffer(long renderPresentationTimeUs) throws ExportException {
releaseOutputBuffer(/* render= */ true, renderPresentationTimeUs);
}
private void releaseOutputBuffer(boolean render, long renderPresentationTimeUs)
throws ExportException {
outputBuffer = null; outputBuffer = null;
try { try {
if (render) { if (render) {
mediaCodec.releaseOutputBuffer( mediaCodec.releaseOutputBuffer(
outputBufferIndex, outputBufferIndex, /* renderTimestampNs= */ renderPresentationTimeUs * 1000);
/* renderTimestampNs= */ checkStateNotNull(outputBufferInfo).presentationTimeUs * 1000);
} else { } else {
mediaCodec.releaseOutputBuffer(outputBufferIndex, /* render= */ false); mediaCodec.releaseOutputBuffer(outputBufferIndex, /* render= */ false);
} }
......
...@@ -43,11 +43,10 @@ import java.util.concurrent.atomic.AtomicLong; ...@@ -43,11 +43,10 @@ import java.util.concurrent.atomic.AtomicLong;
public EncodedSamplePipeline( public EncodedSamplePipeline(
Format format, Format format,
long streamStartPositionUs,
TransformationRequest transformationRequest, TransformationRequest transformationRequest,
MuxerWrapper muxerWrapper, MuxerWrapper muxerWrapper,
FallbackListener fallbackListener) { FallbackListener fallbackListener) {
super(format, streamStartPositionUs, muxerWrapper); super(format, muxerWrapper);
this.format = format; this.format = format;
nextMediaItemOffsetUs = new AtomicLong(); nextMediaItemOffsetUs = new AtomicLong();
availableInputBuffers = new ConcurrentLinkedDeque<>(); availableInputBuffers = new ConcurrentLinkedDeque<>();
......
...@@ -51,6 +51,22 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -51,6 +51,22 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
decoder = decoderFactory.createForAudioDecoding(inputFormat); decoder = decoderFactory.createForAudioDecoding(inputFormat);
} }
@Override
protected boolean shouldDropInputBuffer(DecoderInputBuffer inputBuffer) {
if (inputBuffer.isEndOfStream()) {
return false;
}
inputBuffer.timeUs -= streamStartPositionUs;
// Drop samples with negative timestamp in the transcoding case, to prevent encoder failures.
if (decoder != null && inputBuffer.timeUs < 0) {
inputBuffer.clear();
return true;
}
return false;
}
/** /**
* Attempts to get decoded audio data and pass it to the sample consumer. * Attempts to get decoded audio data and pass it to the sample consumer.
* *
......
...@@ -40,6 +40,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -40,6 +40,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/* package */ abstract class ExoAssetLoaderBaseRenderer extends BaseRenderer { /* package */ abstract class ExoAssetLoaderBaseRenderer extends BaseRenderer {
protected long streamStartPositionUs;
protected long streamOffsetUs; protected long streamOffsetUs;
protected @MonotonicNonNull SampleConsumer sampleConsumer; protected @MonotonicNonNull SampleConsumer sampleConsumer;
protected @MonotonicNonNull Codec decoder; protected @MonotonicNonNull Codec decoder;
...@@ -52,7 +53,6 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -52,7 +53,6 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
private final DecoderInputBuffer decoderInputBuffer; private final DecoderInputBuffer decoderInputBuffer;
private boolean isRunning; private boolean isRunning;
private long streamStartPositionUs;
private boolean shouldInitDecoder; private boolean shouldInitDecoder;
private boolean hasPendingConsumerInput; private boolean hasPendingConsumerInput;
...@@ -213,10 +213,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -213,10 +213,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
onInputFormatRead(inputFormat); onInputFormatRead(inputFormat);
shouldInitDecoder = shouldInitDecoder =
assetLoaderListener.onTrackAdded( assetLoaderListener.onTrackAdded(
inputFormat, inputFormat, SUPPORTED_OUTPUT_TYPE_DECODED | SUPPORTED_OUTPUT_TYPE_ENCODED);
SUPPORTED_OUTPUT_TYPE_DECODED | SUPPORTED_OUTPUT_TYPE_ENCODED,
streamStartPositionUs,
streamOffsetUs);
} }
if (shouldInitDecoder) { if (shouldInitDecoder) {
......
...@@ -93,22 +93,27 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -93,22 +93,27 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@Override @Override
protected boolean shouldDropInputBuffer(DecoderInputBuffer inputBuffer) { protected boolean shouldDropInputBuffer(DecoderInputBuffer inputBuffer) {
ByteBuffer inputBytes = checkNotNull(inputBuffer.data); if (inputBuffer.isEndOfStream()) {
if (sefVideoSlowMotionFlattener == null || inputBuffer.isEndOfStream()) {
return false; return false;
} }
long presentationTimeUs = inputBuffer.timeUs - streamOffsetUs; ByteBuffer inputBytes = checkNotNull(inputBuffer.data);
boolean shouldDropInputBuffer = if (sefVideoSlowMotionFlattener != null) {
sefVideoSlowMotionFlattener.dropOrTransformSample(inputBytes, presentationTimeUs); long presentationTimeUs = inputBuffer.timeUs - streamOffsetUs;
if (shouldDropInputBuffer) { boolean shouldDropInputBuffer =
inputBytes.clear(); sefVideoSlowMotionFlattener.dropOrTransformSample(inputBytes, presentationTimeUs);
} else { if (shouldDropInputBuffer) {
inputBytes.clear();
return true;
}
inputBuffer.timeUs = inputBuffer.timeUs =
streamOffsetUs + sefVideoSlowMotionFlattener.getSamplePresentationTimeUs(); streamOffsetUs + sefVideoSlowMotionFlattener.getSamplePresentationTimeUs();
} }
return shouldDropInputBuffer;
if (decoder == null) {
inputBuffer.timeUs -= streamStartPositionUs;
}
return false;
} }
@Override @Override
...@@ -132,7 +137,9 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -132,7 +137,9 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
return false; return false;
} }
if (isDecodeOnlyBuffer(decoderOutputBufferInfo.presentationTimeUs)) { long presentationTimeUs = decoderOutputBufferInfo.presentationTimeUs - streamStartPositionUs;
// Drop samples with negative timestamp in the transcoding case, to prevent encoder failures.
if (presentationTimeUs < 0 || isDecodeOnlyBuffer(decoderOutputBufferInfo.presentationTimeUs)) {
decoder.releaseOutputBuffer(/* render= */ false); decoder.releaseOutputBuffer(/* render= */ false);
return true; return true;
} }
...@@ -142,11 +149,11 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -142,11 +149,11 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
return false; return false;
} }
if (!sampleConsumer.registerVideoFrame(decoderOutputBufferInfo.presentationTimeUs)) { if (!sampleConsumer.registerVideoFrame(presentationTimeUs)) {
return false; return false;
} }
decoder.releaseOutputBuffer(/* render= */ true); decoder.releaseOutputBuffer(presentationTimeUs);
return true; return true;
} }
......
...@@ -114,11 +114,7 @@ public final class ImageAssetLoader implements AssetLoader { ...@@ -114,11 +114,7 @@ public final class ImageAssetLoader implements AssetLoader {
.setWidth(bitmap.getWidth()) .setWidth(bitmap.getWidth())
.setSampleMimeType(MIME_TYPE_IMAGE_ALL) .setSampleMimeType(MIME_TYPE_IMAGE_ALL)
.build(); .build();
listener.onTrackAdded( listener.onTrackAdded(format, SUPPORTED_OUTPUT_TYPE_DECODED);
format,
SUPPORTED_OUTPUT_TYPE_DECODED,
/* streamStartPositionUs= */ 0,
/* streamOffsetUs= */ 0);
scheduledExecutorService.submit(() -> queueBitmapInternal(bitmap, format)); scheduledExecutorService.submit(() -> queueBitmapInternal(bitmap, format));
} catch (RuntimeException e) { } catch (RuntimeException e) {
listener.onError(ExportException.createForAssetLoader(e, ERROR_CODE_UNSPECIFIED)); listener.onError(ExportException.createForAssetLoader(e, ERROR_CODE_UNSPECIFIED));
......
...@@ -47,16 +47,13 @@ import java.util.List; ...@@ -47,16 +47,13 @@ import java.util.List;
*/ */
/* package */ abstract class SamplePipeline implements SampleConsumer, OnMediaItemChangedListener { /* package */ abstract class SamplePipeline implements SampleConsumer, OnMediaItemChangedListener {
private final long streamStartPositionUs;
private final MuxerWrapper muxerWrapper; private final MuxerWrapper muxerWrapper;
private final @C.TrackType int outputTrackType; private final @C.TrackType int outputTrackType;
@Nullable private final Metadata metadata; @Nullable private final Metadata metadata;
private boolean muxerWrapperTrackAdded; private boolean muxerWrapperTrackAdded;
public SamplePipeline( public SamplePipeline(Format firstInputFormat, MuxerWrapper muxerWrapper) {
Format firstInputFormat, long streamStartPositionUs, MuxerWrapper muxerWrapper) {
this.streamStartPositionUs = streamStartPositionUs;
this.muxerWrapper = muxerWrapper; this.muxerWrapper = muxerWrapper;
this.metadata = firstInputFormat.metadata; this.metadata = firstInputFormat.metadata;
outputTrackType = getProcessedTrackType(firstInputFormat.sampleMimeType); outputTrackType = getProcessedTrackType(firstInputFormat.sampleMimeType);
...@@ -118,15 +115,12 @@ import java.util.List; ...@@ -118,15 +115,12 @@ import java.util.List;
return false; return false;
} }
long samplePresentationTimeUs = muxerInputBuffer.timeUs - streamStartPositionUs;
// TODO(b/204892224): Consider subtracting the first sample timestamp from the sample pipeline
// buffer from all samples so that they are guaranteed to start from zero in the output file.
try { try {
if (!muxerWrapper.writeSample( if (!muxerWrapper.writeSample(
outputTrackType, outputTrackType,
checkStateNotNull(muxerInputBuffer.data), checkStateNotNull(muxerInputBuffer.data),
muxerInputBuffer.isKeyFrame(), muxerInputBuffer.isKeyFrame(),
samplePresentationTimeUs)) { muxerInputBuffer.timeUs)) {
return false; return false;
} }
} catch (Muxer.MuxerException e) { } catch (Muxer.MuxerException e) {
......
...@@ -83,7 +83,6 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -83,7 +83,6 @@ import java.util.concurrent.atomic.AtomicInteger;
private int currentMediaItemIndex; private int currentMediaItemIndex;
private AssetLoader currentAssetLoader; private AssetLoader currentAssetLoader;
private boolean trackCountReported; private boolean trackCountReported;
private long currentAssetStartTimeUs;
private boolean decodeAudio; private boolean decodeAudio;
private boolean decodeVideo; private boolean decodeVideo;
private long totalDurationUs; private long totalDurationUs;
...@@ -207,13 +206,7 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -207,13 +206,7 @@ import java.util.concurrent.atomic.AtomicInteger;
} }
@Override @Override
public boolean onTrackAdded( public boolean onTrackAdded(Format inputFormat, @SupportedOutputTypes int supportedOutputTypes) {
Format inputFormat,
@SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs) {
currentAssetStartTimeUs = streamStartPositionUs;
boolean isAudio = getProcessedTrackType(inputFormat.sampleMimeType) == C.TRACK_TYPE_AUDIO; boolean isAudio = getProcessedTrackType(inputFormat.sampleMimeType) == C.TRACK_TYPE_AUDIO;
if (!isCurrentAssetFirstAsset) { if (!isCurrentAssetFirstAsset) {
return isAudio ? decodeAudio : decodeVideo; return isAudio ? decodeAudio : decodeVideo;
...@@ -228,8 +221,7 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -228,8 +221,7 @@ import java.util.concurrent.atomic.AtomicInteger;
} }
boolean decodeOutput = boolean decodeOutput =
sequenceAssetLoaderListener.onTrackAdded( sequenceAssetLoaderListener.onTrackAdded(inputFormat, supportedOutputTypes);
inputFormat, supportedOutputTypes, streamStartPositionUs, streamOffsetUs);
if (isAudio) { if (isAudio) {
decodeAudio = decodeOutput; decodeAudio = decodeOutput;
...@@ -239,10 +231,7 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -239,10 +231,7 @@ import java.util.concurrent.atomic.AtomicInteger;
if (addForcedAudioTrack) { if (addForcedAudioTrack) {
sequenceAssetLoaderListener.onTrackAdded( sequenceAssetLoaderListener.onTrackAdded(
FORCE_AUDIO_TRACK_FORMAT, FORCE_AUDIO_TRACK_FORMAT, SUPPORTED_OUTPUT_TYPE_DECODED);
SUPPORTED_OUTPUT_TYPE_DECODED,
streamStartPositionUs,
streamOffsetUs);
} }
return decodeOutput; return decodeOutput;
...@@ -374,7 +363,7 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -374,7 +363,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Override @Override
public boolean queueInputBuffer() { public boolean queueInputBuffer() {
DecoderInputBuffer inputBuffer = checkStateNotNull(sampleConsumer.getInputBuffer()); DecoderInputBuffer inputBuffer = checkStateNotNull(sampleConsumer.getInputBuffer());
long globalTimestampUs = totalDurationUs + inputBuffer.timeUs - currentAssetStartTimeUs; long globalTimestampUs = totalDurationUs + inputBuffer.timeUs;
if (isLooping && globalTimestampUs >= maxSequenceDurationUs) { if (isLooping && globalTimestampUs >= maxSequenceDurationUs) {
if (isMaxSequenceDurationUsFinal && !audioLoopingEnded) { if (isMaxSequenceDurationUsFinal && !audioLoopingEnded) {
checkNotNull(inputBuffer.data).limit(0); checkNotNull(inputBuffer.data).limit(0);
...@@ -450,7 +439,7 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -450,7 +439,7 @@ import java.util.concurrent.atomic.AtomicInteger;
@Override @Override
public boolean registerVideoFrame(long presentationTimeUs) { public boolean registerVideoFrame(long presentationTimeUs) {
long globalTimestampUs = totalDurationUs + presentationTimeUs - currentAssetStartTimeUs; long globalTimestampUs = totalDurationUs + presentationTimeUs;
if (isLooping && globalTimestampUs >= maxSequenceDurationUs) { if (isLooping && globalTimestampUs >= maxSequenceDurationUs) {
if (isMaxSequenceDurationUsFinal && !videoLoopingEnded) { if (isMaxSequenceDurationUsFinal && !videoLoopingEnded) {
videoLoopingEnded = true; videoLoopingEnded = true;
......
...@@ -481,17 +481,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -481,17 +481,11 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
@Override @Override
public boolean onTrackAdded( public boolean onTrackAdded(
Format firstAssetLoaderInputFormat, Format firstAssetLoaderInputFormat,
@AssetLoader.SupportedOutputTypes int supportedOutputTypes, @AssetLoader.SupportedOutputTypes int supportedOutputTypes) {
long streamStartPositionUs,
long streamOffsetUs) {
@C.TrackType @C.TrackType
int trackType = getProcessedTrackType(firstAssetLoaderInputFormat.sampleMimeType); int trackType = getProcessedTrackType(firstAssetLoaderInputFormat.sampleMimeType);
AddedTrackInfo trackInfo = AddedTrackInfo trackInfo =
new AddedTrackInfo( new AddedTrackInfo(firstAssetLoaderInputFormat, supportedOutputTypes);
firstAssetLoaderInputFormat,
supportedOutputTypes,
streamStartPositionUs,
streamOffsetUs);
addedTrackInfoByTrackType.put(trackType, trackInfo); addedTrackInfoByTrackType.put(trackType, trackInfo);
if (trackType == C.TRACK_TYPE_AUDIO) { if (trackType == C.TRACK_TYPE_AUDIO) {
...@@ -547,7 +541,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -547,7 +541,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return new AudioSamplePipeline( return new AudioSamplePipeline(
addedTrackInfo.firstAssetLoaderInputFormat, addedTrackInfo.firstAssetLoaderInputFormat,
/* firstPipelineInputFormat= */ firstAssetLoaderOutputFormat, /* firstPipelineInputFormat= */ firstAssetLoaderOutputFormat,
addedTrackInfo.streamOffsetUs,
transformationRequest, transformationRequest,
firstEditedMediaItem.flattenForSlowMotion, firstEditedMediaItem.flattenForSlowMotion,
firstEditedMediaItem.effects.audioProcessors, firstEditedMediaItem.effects.audioProcessors,
...@@ -566,8 +559,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -566,8 +559,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return new VideoSamplePipeline( return new VideoSamplePipeline(
context, context,
addedTrackInfo.firstAssetLoaderInputFormat, addedTrackInfo.firstAssetLoaderInputFormat,
addedTrackInfo.streamStartPositionUs,
addedTrackInfo.streamOffsetUs,
transformationRequest, transformationRequest,
firstEditedMediaItem.effects.videoEffects, firstEditedMediaItem.effects.videoEffects,
compositionPresentation, compositionPresentation,
...@@ -582,11 +573,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -582,11 +573,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} }
return new EncodedSamplePipeline( return new EncodedSamplePipeline(
firstAssetLoaderOutputFormat, firstAssetLoaderOutputFormat, transformationRequest, muxerWrapper, fallbackListener);
addedTrackInfo.streamStartPositionUs,
transformationRequest,
muxerWrapper,
fallbackListener);
} }
/** /**
...@@ -631,31 +618,17 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -631,31 +618,17 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
private final class AddedTrackInfo { private final class AddedTrackInfo {
public final Format firstAssetLoaderInputFormat; public final Format firstAssetLoaderInputFormat;
public final long streamStartPositionUs;
public final long streamOffsetUs;
public final boolean shouldTranscode; public final boolean shouldTranscode;
public AddedTrackInfo( public AddedTrackInfo(
Format firstAssetLoaderInputFormat, Format firstAssetLoaderInputFormat,
@AssetLoader.SupportedOutputTypes int supportedOutputTypes, @AssetLoader.SupportedOutputTypes int supportedOutputTypes) {
long streamStartPositionUs,
long streamOffsetUs) {
this.firstAssetLoaderInputFormat = firstAssetLoaderInputFormat; this.firstAssetLoaderInputFormat = firstAssetLoaderInputFormat;
this.streamStartPositionUs = streamStartPositionUs; shouldTranscode = shouldTranscode(firstAssetLoaderInputFormat, supportedOutputTypes);
this.streamOffsetUs = streamOffsetUs;
shouldTranscode =
shouldTranscode(
firstAssetLoaderInputFormat,
supportedOutputTypes,
streamStartPositionUs,
streamOffsetUs);
} }
private boolean shouldTranscode( private boolean shouldTranscode(
Format inputFormat, Format inputFormat, @AssetLoader.SupportedOutputTypes int supportedOutputTypes) {
@AssetLoader.SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs) {
boolean assetLoaderCanOutputDecoded = boolean assetLoaderCanOutputDecoded =
(supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_DECODED) != 0; (supportedOutputTypes & SUPPORTED_OUTPUT_TYPE_DECODED) != 0;
boolean assetLoaderCanOutputEncoded = boolean assetLoaderCanOutputEncoded =
...@@ -670,8 +643,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -670,8 +643,7 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
} else if (trackType == C.TRACK_TYPE_AUDIO) { } else if (trackType == C.TRACK_TYPE_AUDIO) {
shouldTranscode = shouldTranscodeAudio(inputFormat); shouldTranscode = shouldTranscodeAudio(inputFormat);
} else if (trackType == C.TRACK_TYPE_VIDEO) { } else if (trackType == C.TRACK_TYPE_VIDEO) {
shouldTranscode = shouldTranscode = shouldTranscodeVideo(inputFormat);
shouldTranscodeVideo(inputFormat, streamStartPositionUs, streamOffsetUs);
} }
checkState(!shouldTranscode || assetLoaderCanOutputDecoded); checkState(!shouldTranscode || assetLoaderCanOutputDecoded);
...@@ -704,13 +676,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -704,13 +676,12 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
return false; return false;
} }
private boolean shouldTranscodeVideo( private boolean shouldTranscodeVideo(Format inputFormat) {
Format inputFormat, long streamStartPositionUs, long streamOffsetUs) {
if (composition.sequences.size() > 1 || editedMediaItems.size() > 1) { if (composition.sequences.size() > 1 || editedMediaItems.size() > 1) {
return !composition.transmuxVideo; return !composition.transmuxVideo;
} }
EditedMediaItem firstEditedMediaItem = editedMediaItems.get(0); EditedMediaItem firstEditedMediaItem = editedMediaItems.get(0);
if ((streamStartPositionUs - streamOffsetUs) != 0 if (firstEditedMediaItem.mediaItem.clippingConfiguration.startPositionMs > 0
&& !firstEditedMediaItem.mediaItem.clippingConfiguration.startsAtKeyFrame) { && !firstEditedMediaItem.mediaItem.clippingConfiguration.startsAtKeyFrame) {
return true; return true;
} }
......
...@@ -61,7 +61,6 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -61,7 +61,6 @@ import org.checkerframework.dataflow.qual.Pure;
/** MIME type to use for output video if the input type is not a video. */ /** MIME type to use for output video if the input type is not a video. */
private static final String DEFAULT_OUTPUT_MIME_TYPE = MimeTypes.VIDEO_H265; private static final String DEFAULT_OUTPUT_MIME_TYPE = MimeTypes.VIDEO_H265;
private final long streamOffsetUs;
private final AtomicLong mediaItemOffsetUs; private final AtomicLong mediaItemOffsetUs;
private final VideoFrameProcessor videoFrameProcessor; private final VideoFrameProcessor videoFrameProcessor;
private final ColorInfo videoFrameProcessorInputColor; private final ColorInfo videoFrameProcessorInputColor;
...@@ -77,8 +76,6 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -77,8 +76,6 @@ import org.checkerframework.dataflow.qual.Pure;
public VideoSamplePipeline( public VideoSamplePipeline(
Context context, Context context,
Format firstInputFormat, Format firstInputFormat,
long streamStartPositionUs,
long streamOffsetUs,
TransformationRequest transformationRequest, TransformationRequest transformationRequest,
ImmutableList<Effect> effects, ImmutableList<Effect> effects,
@Nullable Presentation presentation, @Nullable Presentation presentation,
...@@ -91,8 +88,7 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -91,8 +88,7 @@ import org.checkerframework.dataflow.qual.Pure;
DebugViewProvider debugViewProvider) DebugViewProvider debugViewProvider)
throws ExportException { throws ExportException {
// TODO(b/262693177) Add tests for input format change. // TODO(b/262693177) Add tests for input format change.
super(firstInputFormat, streamStartPositionUs, muxerWrapper); super(firstInputFormat, muxerWrapper);
this.streamOffsetUs = streamOffsetUs;
mediaItemOffsetUs = new AtomicLong(); mediaItemOffsetUs = new AtomicLong();
finalFramePresentationTimeUs = C.TIME_UNSET; finalFramePresentationTimeUs = C.TIME_UNSET;
...@@ -199,7 +195,6 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -199,7 +195,6 @@ import org.checkerframework.dataflow.qual.Pure;
new FrameInfo.Builder(decodedSize.getWidth(), decodedSize.getHeight()) new FrameInfo.Builder(decodedSize.getWidth(), decodedSize.getHeight())
.setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio) .setPixelWidthHeightRatio(trackFormat.pixelWidthHeightRatio)
.setOffsetToAddUs(mediaItemOffsetUs.get()) .setOffsetToAddUs(mediaItemOffsetUs.get())
.setStreamOffsetUs(streamOffsetUs)
.build()); .build());
} }
mediaItemOffsetUs.addAndGet(durationUs); mediaItemOffsetUs.addAndGet(durationUs);
......
...@@ -75,10 +75,7 @@ public class ExoPlayerAssetLoaderTest { ...@@ -75,10 +75,7 @@ public class ExoPlayerAssetLoaderTest {
@Override @Override
public boolean onTrackAdded( public boolean onTrackAdded(
Format inputFormat, Format inputFormat, @AssetLoader.SupportedOutputTypes int supportedOutputTypes) {
@AssetLoader.SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs) {
if (!isDurationSet) { if (!isDurationSet) {
exceptionRef.set( exceptionRef.set(
new IllegalStateException("onTrackAdded() called before onDurationUs()")); new IllegalStateException("onTrackAdded() called before onDurationUs()"));
......
...@@ -67,10 +67,7 @@ public class ImageAssetLoaderTest { ...@@ -67,10 +67,7 @@ public class ImageAssetLoaderTest {
@Override @Override
public boolean onTrackAdded( public boolean onTrackAdded(
Format inputFormat, Format inputFormat, @AssetLoader.SupportedOutputTypes int supportedOutputTypes) {
@AssetLoader.SupportedOutputTypes int supportedOutputTypes,
long streamStartPositionUs,
long streamOffsetUs) {
if (!isDurationSet) { if (!isDurationSet) {
exceptionRef.set( exceptionRef.set(
new IllegalStateException("onTrackAdded() called before onDurationUs()")); new IllegalStateException("onTrackAdded() called before onDurationUs()"));
......
...@@ -115,8 +115,7 @@ public final class TestUtil { ...@@ -115,8 +115,7 @@ public final class TestUtil {
.setChannelCount(2) .setChannelCount(2)
.build(); .build();
try { try {
listener.onTrackAdded( listener.onTrackAdded(format, supportedOutputTypes);
format, supportedOutputTypes, /* streamStartPositionUs= */ 0, /* streamOffsetUs= */ 0);
SampleConsumer sampleConsumer = listener.onOutputFormat(format); SampleConsumer sampleConsumer = listener.onOutputFormat(format);
if (sampleConsumerRef != null) { if (sampleConsumerRef != null) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment