Commit 88d7b14b by hschlueter Committed by kim-vde

Do not queue empty input buffers.

Follow-up to a comment on https://github.com/google/ExoPlayer/commit/6f0f7dd1be40c81cb056d82215ceabdd5a8a2a1a: Buffers that are useful to pass
to the sample/passthrough pipeline should either contain data or the
end of input flag. Otherwise, passing these buffers along is unnecessary
and may even cause the decoder to allocate a new input buffer which is
wasteful.

PiperOrigin-RevId: 411060709
parent 4a69e166
...@@ -122,7 +122,7 @@ import java.nio.ByteBuffer; ...@@ -122,7 +122,7 @@ import java.nio.ByteBuffer;
* Attempts to write a sample to the muxer. * Attempts to write a sample to the muxer.
* *
* @param trackType The {@link C.TrackType track type} of the sample. * @param trackType The {@link C.TrackType track type} of the sample.
* @param data The sample to write, or {@code null} if the sample is empty. * @param data The sample to write.
* @param isKeyFrame Whether the sample is a key frame. * @param isKeyFrame Whether the sample is a key frame.
* @param presentationTimeUs The presentation time of the sample in microseconds. * @param presentationTimeUs The presentation time of the sample in microseconds.
* @return Whether the sample was successfully written. This is {@code false} if the muxer hasn't * @return Whether the sample was successfully written. This is {@code false} if the muxer hasn't
...@@ -133,10 +133,7 @@ import java.nio.ByteBuffer; ...@@ -133,10 +133,7 @@ import java.nio.ByteBuffer;
* track of the given track type. * track of the given track type.
*/ */
public boolean writeSample( public boolean writeSample(
@C.TrackType int trackType, @C.TrackType int trackType, ByteBuffer data, boolean isKeyFrame, long presentationTimeUs) {
@Nullable ByteBuffer data,
boolean isKeyFrame,
long presentationTimeUs) {
int trackIndex = trackTypeToIndex.get(trackType, /* valueIfKeyNotFound= */ C.INDEX_UNSET); int trackIndex = trackTypeToIndex.get(trackType, /* valueIfKeyNotFound= */ C.INDEX_UNSET);
checkState( checkState(
trackIndex != C.INDEX_UNSET, trackIndex != C.INDEX_UNSET,
...@@ -144,8 +141,6 @@ import java.nio.ByteBuffer; ...@@ -144,8 +141,6 @@ import java.nio.ByteBuffer;
if (!canWriteSampleOfType(trackType)) { if (!canWriteSampleOfType(trackType)) {
return false; return false;
} else if (data == null) {
return true;
} }
muxer.writeSampleData(trackIndex, data, isKeyFrame, presentationTimeUs); muxer.writeSampleData(trackIndex, data, isKeyFrame, presentationTimeUs);
......
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.transformer;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
/**
 * A sample transformer for a given track.
 *
 * <p>Samples are transformed in place: {@link #transformSample(DecoderInputBuffer)} returns
 * {@code void}, so implementations mutate the given buffer directly and signal that a sample
 * should be dropped by setting its data to {@code null}.
 */
/* package */ interface SampleTransformer {
/**
 * Transforms the data and metadata of the sample contained in {@code buffer}.
 *
 * <p>After this method returns, the caller must check the buffer's {@link
 * DecoderInputBuffer#data data}: if it is {@code null}, the sample must be discarded rather
 * than passed downstream.
 *
 * @param buffer The sample to transform, mutated in place. If the sample {@link
 *     DecoderInputBuffer#data data} is {@code null} after the execution of this method, the
 *     sample must be discarded.
 */
void transformSample(DecoderInputBuffer buffer);
}
...@@ -19,7 +19,6 @@ package com.google.android.exoplayer2.transformer; ...@@ -19,7 +19,6 @@ package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.util.Assertions.checkArgument; import static com.google.android.exoplayer2.util.Assertions.checkArgument;
import static com.google.android.exoplayer2.util.Assertions.checkState; import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.NalUnitUtil.NAL_START_CODE; import static com.google.android.exoplayer2.util.NalUnitUtil.NAL_START_CODE;
import static com.google.android.exoplayer2.util.Util.castNonNull;
import static java.lang.Math.min; import static java.lang.Math.min;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
...@@ -40,7 +39,7 @@ import java.util.List; ...@@ -40,7 +39,7 @@ import java.util.List;
import org.checkerframework.checker.nullness.qual.RequiresNonNull; import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/** /**
* {@link SampleTransformer} that flattens SEF slow motion video samples. * Sample transformer that flattens SEF slow motion video samples.
* *
* <p>Such samples follow the ITU-T Recommendation H.264 with temporal SVC. * <p>Such samples follow the ITU-T Recommendation H.264 with temporal SVC.
* *
...@@ -50,7 +49,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -50,7 +49,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
* <p>The mathematical formulas used in this class are explained in [Internal ref: * <p>The mathematical formulas used in this class are explained in [Internal ref:
* http://go/exoplayer-sef-slomo-video-flattening]. * http://go/exoplayer-sef-slomo-video-flattening].
*/ */
/* package */ final class SefSlowMotionVideoSampleTransformer implements SampleTransformer { /* package */ final class SefSlowMotionFlattener {
/** /**
* The frame rate of SEF slow motion videos, in fps. * The frame rate of SEF slow motion videos, in fps.
...@@ -109,7 +108,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -109,7 +108,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
*/ */
private long frameTimeDeltaUs; private long frameTimeDeltaUs;
public SefSlowMotionVideoSampleTransformer(Format format) { public SefSlowMotionFlattener(Format format) {
scratch = new byte[NAL_START_CODE_LENGTH]; scratch = new byte[NAL_START_CODE_LENGTH];
MetadataInfo metadataInfo = getMetadataInfo(format.metadata); MetadataInfo metadataInfo = getMetadataInfo(format.metadata);
slowMotionData = metadataInfo.slowMotionData; slowMotionData = metadataInfo.slowMotionData;
...@@ -130,14 +129,20 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -130,14 +129,20 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
} }
} }
@Override /**
public void transformSample(DecoderInputBuffer buffer) { * Applies slow motion flattening by either indicating that the buffer's data should be dropped or
* transforming it in place.
*
* @return Whether the buffer should be dropped.
*/
@RequiresNonNull("#1.data")
public boolean dropOrTransformSample(DecoderInputBuffer buffer) {
if (slowMotionData == null) { if (slowMotionData == null) {
// The input is not an SEF slow motion video. // The input is not an SEF slow motion video.
return; return false;
} }
ByteBuffer data = castNonNull(buffer.data); ByteBuffer data = buffer.data;
int originalPosition = data.position(); int originalPosition = data.position();
data.position(originalPosition + NAL_START_CODE_LENGTH); data.position(originalPosition + NAL_START_CODE_LENGTH);
data.get(scratch, 0, 4); // Read nal_unit_header_svc_extension. data.get(scratch, 0, 4); // Read nal_unit_header_svc_extension.
...@@ -148,14 +153,14 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -148,14 +153,14 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
"Missing SVC extension prefix NAL unit."); "Missing SVC extension prefix NAL unit.");
int layer = (scratch[3] & 0xFF) >> 5; int layer = (scratch[3] & 0xFF) >> 5;
boolean shouldKeepFrame = processCurrentFrame(layer, buffer.timeUs); boolean shouldKeepFrame = processCurrentFrame(layer, buffer.timeUs);
// Update buffer timestamp regardless of whether the frame is dropped because the buffer might // Update the timestamp regardless of whether the buffer is dropped as the timestamp may be
// still be passed to a decoder if it contains an end of stream flag. // reused for the empty end-of-stream buffer.
buffer.timeUs = getCurrentFrameOutputTimeUs(/* inputTimeUs= */ buffer.timeUs); buffer.timeUs = getCurrentFrameOutputTimeUs(/* inputTimeUs= */ buffer.timeUs);
if (shouldKeepFrame) { if (shouldKeepFrame) {
skipToNextNalUnit(data); // Skip over prefix_nal_unit_svc. skipToNextNalUnit(data); // Skip over prefix_nal_unit_svc.
} else { return false;
buffer.data = null;
} }
return true;
} }
/** /**
......
...@@ -18,6 +18,7 @@ package com.google.android.exoplayer2.transformer; ...@@ -18,6 +18,7 @@ package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.source.SampleStream.FLAG_REQUIRE_FORMAT; import static com.google.android.exoplayer2.source.SampleStream.FLAG_REQUIRE_FORMAT;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi; import androidx.annotation.RequiresApi;
...@@ -127,7 +128,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -127,7 +128,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
} }
if (!muxerWrapper.writeSample( if (!muxerWrapper.writeSample(
getTrackType(), getTrackType(),
samplePipelineOutputBuffer.data, checkStateNotNull(samplePipelineOutputBuffer.data),
/* isKeyFrame= */ true, /* isKeyFrame= */ true,
samplePipelineOutputBuffer.timeUs)) { samplePipelineOutputBuffer.timeUs)) {
return false; return false;
...@@ -152,11 +153,15 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -152,11 +153,15 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
int result = readSource(getFormatHolder(), samplePipelineInputBuffer, /* readFlags= */ 0); int result = readSource(getFormatHolder(), samplePipelineInputBuffer, /* readFlags= */ 0);
switch (result) { switch (result) {
case C.RESULT_BUFFER_READ: case C.RESULT_BUFFER_READ:
if (samplePipelineInputBuffer.isEndOfStream()) {
samplePipeline.queueInputBuffer();
return false;
}
mediaClock.updateTimeForTrackType(getTrackType(), samplePipelineInputBuffer.timeUs); mediaClock.updateTimeForTrackType(getTrackType(), samplePipelineInputBuffer.timeUs);
samplePipelineInputBuffer.timeUs -= streamOffsetUs; samplePipelineInputBuffer.timeUs -= streamOffsetUs;
samplePipelineInputBuffer.flip(); samplePipelineInputBuffer.flip();
samplePipeline.queueInputBuffer(); samplePipeline.queueInputBuffer();
return !samplePipelineInputBuffer.isEndOfStream(); return true;
case C.RESULT_FORMAT_READ: case C.RESULT_FORMAT_READ:
throw new IllegalStateException("Format changes are not supported."); throw new IllegalStateException("Format changes are not supported.");
case C.RESULT_NOTHING_READ: case C.RESULT_NOTHING_READ:
......
...@@ -18,6 +18,7 @@ package com.google.android.exoplayer2.transformer; ...@@ -18,6 +18,7 @@ package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.source.SampleStream.FLAG_REQUIRE_FORMAT; import static com.google.android.exoplayer2.source.SampleStream.FLAG_REQUIRE_FORMAT;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import android.content.Context; import android.content.Context;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
...@@ -28,6 +29,7 @@ import com.google.android.exoplayer2.Format; ...@@ -28,6 +29,7 @@ import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.source.SampleStream.ReadDataResult; import com.google.android.exoplayer2.source.SampleStream.ReadDataResult;
import java.nio.ByteBuffer;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf; import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull; import org.checkerframework.checker.nullness.qual.RequiresNonNull;
...@@ -40,7 +42,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -40,7 +42,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
private final Context context; private final Context context;
private final DecoderInputBuffer decoderInputBuffer; private final DecoderInputBuffer decoderInputBuffer;
private @MonotonicNonNull SampleTransformer slowMotionSampleTransformer; private @MonotonicNonNull SefSlowMotionFlattener sefSlowMotionFlattener;
private @MonotonicNonNull SamplePipeline samplePipeline; private @MonotonicNonNull SamplePipeline samplePipeline;
private boolean muxerWrapperTrackAdded; private boolean muxerWrapperTrackAdded;
private boolean muxerWrapperTrackEnded; private boolean muxerWrapperTrackEnded;
...@@ -107,7 +109,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -107,7 +109,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
samplePipeline = new PassthroughSamplePipeline(decoderInputFormat); samplePipeline = new PassthroughSamplePipeline(decoderInputFormat);
} }
if (transformation.flattenForSlowMotion) { if (transformation.flattenForSlowMotion) {
slowMotionSampleTransformer = new SefSlowMotionVideoSampleTransformer(decoderInputFormat); sefSlowMotionFlattener = new SefSlowMotionFlattener(decoderInputFormat);
} }
return true; return true;
} }
...@@ -141,7 +143,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -141,7 +143,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
if (!muxerWrapper.writeSample( if (!muxerWrapper.writeSample(
getTrackType(), getTrackType(),
samplePipelineOutputBuffer.data, checkStateNotNull(samplePipelineOutputBuffer.data),
samplePipelineOutputBuffer.isKeyFrame(), samplePipelineOutputBuffer.isKeyFrame(),
samplePipelineOutputBuffer.timeUs)) { samplePipelineOutputBuffer.timeUs)) {
return false; return false;
...@@ -172,17 +174,24 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull; ...@@ -172,17 +174,24 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
int result = readSource(getFormatHolder(), samplePipelineInputBuffer, /* readFlags= */ 0); int result = readSource(getFormatHolder(), samplePipelineInputBuffer, /* readFlags= */ 0);
switch (result) { switch (result) {
case C.RESULT_BUFFER_READ: case C.RESULT_BUFFER_READ:
if (samplePipelineInputBuffer.data != null if (samplePipelineInputBuffer.isEndOfStream()) {
&& samplePipelineInputBuffer.data.position() > 0) { samplePipeline.queueInputBuffer();
mediaClock.updateTimeForTrackType(getTrackType(), samplePipelineInputBuffer.timeUs); return false;
samplePipelineInputBuffer.timeUs -= streamOffsetUs; }
samplePipelineInputBuffer.flip(); mediaClock.updateTimeForTrackType(getTrackType(), samplePipelineInputBuffer.timeUs);
if (slowMotionSampleTransformer != null) { samplePipelineInputBuffer.timeUs -= streamOffsetUs;
slowMotionSampleTransformer.transformSample(samplePipelineInputBuffer); samplePipelineInputBuffer.flip();
if (sefSlowMotionFlattener != null) {
ByteBuffer data = checkStateNotNull(samplePipelineInputBuffer.data);
boolean shouldDropSample =
sefSlowMotionFlattener.dropOrTransformSample(samplePipelineInputBuffer);
if (shouldDropSample) {
data.clear();
return true;
} }
} }
samplePipeline.queueInputBuffer(); samplePipeline.queueInputBuffer();
return !samplePipelineInputBuffer.isEndOfStream(); return true;
case C.RESULT_FORMAT_READ: case C.RESULT_FORMAT_READ:
throw new IllegalStateException("Format changes are not supported."); throw new IllegalStateException("Format changes are not supported.");
case C.RESULT_NOTHING_READ: case C.RESULT_NOTHING_READ:
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
package com.google.android.exoplayer2.transformer; package com.google.android.exoplayer2.transformer;
import static com.google.android.exoplayer2.transformer.SefSlowMotionVideoSampleTransformer.INPUT_FRAME_RATE; import static com.google.android.exoplayer2.transformer.SefSlowMotionFlattener.INPUT_FRAME_RATE;
import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertThat;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
...@@ -32,9 +32,9 @@ import java.util.List; ...@@ -32,9 +32,9 @@ import java.util.List;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
/** Unit tests for {@link SefSlowMotionVideoSampleTransformer}. */ /** Unit tests for {@link SefSlowMotionFlattener}. */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
public class SefSlowMotionVideoSampleTransformerTest { public class SefSlowMotionFlattenerTest {
/** /**
* Sequence of temporal SVC layers in an SEF slow motion video track with a maximum layer of 3. * Sequence of temporal SVC layers in an SEF slow motion video track with a maximum layer of 3.
...@@ -56,10 +56,9 @@ public class SefSlowMotionVideoSampleTransformerTest { ...@@ -56,10 +56,9 @@ public class SefSlowMotionVideoSampleTransformerTest {
createSefSlowMotionFormat( createSefSlowMotionFormat(
captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2)); captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2));
SefSlowMotionVideoSampleTransformer sampleTransformer = SefSlowMotionFlattener sefSlowMotionFlattener = new SefSlowMotionFlattener(format);
new SefSlowMotionVideoSampleTransformer(format);
List<Integer> outputLayers = List<Integer> outputLayers =
getKeptOutputLayers(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount); getKeptOutputLayers(sefSlowMotionFlattener, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
List<Integer> expectedLayers = Arrays.asList(0, 0, 1, 0, 0, 1, 2, 3, 0, 3, 2, 3, 1, 3, 0); List<Integer> expectedLayers = Arrays.asList(0, 0, 1, 0, 0, 1, 2, 3, 0, 3, 2, 3, 1, 3, 0);
assertThat(outputLayers).isEqualTo(expectedLayers); assertThat(outputLayers).isEqualTo(expectedLayers);
...@@ -78,10 +77,9 @@ public class SefSlowMotionVideoSampleTransformerTest { ...@@ -78,10 +77,9 @@ public class SefSlowMotionVideoSampleTransformerTest {
createSefSlowMotionFormat( createSefSlowMotionFormat(
captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2)); captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2));
SefSlowMotionVideoSampleTransformer sampleTransformer = SefSlowMotionFlattener sefSlowMotionFlattener = new SefSlowMotionFlattener(format);
new SefSlowMotionVideoSampleTransformer(format);
List<Integer> outputLayers = List<Integer> outputLayers =
getKeptOutputLayers(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount); getKeptOutputLayers(sefSlowMotionFlattener, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
List<Integer> expectedLayers = List<Integer> expectedLayers =
Arrays.asList(0, 1, 0, 3, 2, 3, 1, 3, 2, 3, 0, 1, 0, 1, 2, 3, 0, 3, 2, 3, 1, 3, 0, 1); Arrays.asList(0, 1, 0, 3, 2, 3, 1, 3, 2, 3, 0, 1, 0, 1, 2, 3, 0, 3, 2, 3, 1, 3, 0, 1);
...@@ -101,10 +99,9 @@ public class SefSlowMotionVideoSampleTransformerTest { ...@@ -101,10 +99,9 @@ public class SefSlowMotionVideoSampleTransformerTest {
createSefSlowMotionFormat( createSefSlowMotionFormat(
captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2)); captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2));
SefSlowMotionVideoSampleTransformer sampleTransformer = SefSlowMotionFlattener sefSlowMotionFlattener = new SefSlowMotionFlattener(format);
new SefSlowMotionVideoSampleTransformer(format);
List<Integer> outputLayers = List<Integer> outputLayers =
getKeptOutputLayers(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount); getKeptOutputLayers(sefSlowMotionFlattener, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
List<Integer> expectedLayers = Arrays.asList(0, 0, 1, 0, 2, 3, 1, 3, 0); List<Integer> expectedLayers = Arrays.asList(0, 0, 1, 0, 2, 3, 1, 3, 0);
assertThat(outputLayers).isEqualTo(expectedLayers); assertThat(outputLayers).isEqualTo(expectedLayers);
...@@ -129,10 +126,9 @@ public class SefSlowMotionVideoSampleTransformerTest { ...@@ -129,10 +126,9 @@ public class SefSlowMotionVideoSampleTransformerTest {
inputMaxLayer, inputMaxLayer,
Arrays.asList(segmentWithNoFrame1, segmentWithNoFrame2, segmentWithFrame)); Arrays.asList(segmentWithNoFrame1, segmentWithNoFrame2, segmentWithFrame));
SefSlowMotionVideoSampleTransformer sampleTransformer = SefSlowMotionFlattener sefSlowMotionFlattener = new SefSlowMotionFlattener(format);
new SefSlowMotionVideoSampleTransformer(format);
List<Integer> outputLayers = List<Integer> outputLayers =
getKeptOutputLayers(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount); getKeptOutputLayers(sefSlowMotionFlattener, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
List<Integer> expectedLayers = Arrays.asList(0, 0, 1); List<Integer> expectedLayers = Arrays.asList(0, 0, 1);
assertThat(outputLayers).isEqualTo(expectedLayers); assertThat(outputLayers).isEqualTo(expectedLayers);
...@@ -153,10 +149,9 @@ public class SefSlowMotionVideoSampleTransformerTest { ...@@ -153,10 +149,9 @@ public class SefSlowMotionVideoSampleTransformerTest {
createSefSlowMotionFormat( createSefSlowMotionFormat(
captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2)); captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2));
SefSlowMotionVideoSampleTransformer sampleTransformer = SefSlowMotionFlattener sefSlowMotionFlattener = new SefSlowMotionFlattener(format);
new SefSlowMotionVideoSampleTransformer(format);
List<Long> outputTimesUs = List<Long> outputTimesUs =
getOutputTimesUs(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount); getOutputTimesUs(sefSlowMotionFlattener, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
// Test frame inside segment. // Test frame inside segment.
assertThat(outputTimesUs.get(9)) assertThat(outputTimesUs.get(9))
...@@ -181,10 +176,9 @@ public class SefSlowMotionVideoSampleTransformerTest { ...@@ -181,10 +176,9 @@ public class SefSlowMotionVideoSampleTransformerTest {
createSefSlowMotionFormat( createSefSlowMotionFormat(
captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2)); captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2));
SefSlowMotionVideoSampleTransformer sampleTransformer = SefSlowMotionFlattener sefSlowMotionFlattener = new SefSlowMotionFlattener(format);
new SefSlowMotionVideoSampleTransformer(format);
List<Long> outputTimesUs = List<Long> outputTimesUs =
getOutputTimesUs(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount); getOutputTimesUs(sefSlowMotionFlattener, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
// Test frame inside segment. // Test frame inside segment.
assertThat(outputTimesUs.get(9)) assertThat(outputTimesUs.get(9))
...@@ -209,10 +203,9 @@ public class SefSlowMotionVideoSampleTransformerTest { ...@@ -209,10 +203,9 @@ public class SefSlowMotionVideoSampleTransformerTest {
createSefSlowMotionFormat( createSefSlowMotionFormat(
captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2)); captureFrameRate, inputMaxLayer, Arrays.asList(segment1, segment2));
SefSlowMotionVideoSampleTransformer sampleTransformer = SefSlowMotionFlattener sefSlowMotionFlattener = new SefSlowMotionFlattener(format);
new SefSlowMotionVideoSampleTransformer(format);
List<Long> outputTimesUs = List<Long> outputTimesUs =
getOutputTimesUs(sampleTransformer, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount); getOutputTimesUs(sefSlowMotionFlattener, LAYER_SEQUENCE_MAX_LAYER_THREE, frameCount);
// Test frame inside second segment. // Test frame inside second segment.
assertThat(outputTimesUs.get(9)).isEqualTo(136_250); assertThat(outputTimesUs.get(9)).isEqualTo(136_250);
...@@ -249,20 +242,20 @@ public class SefSlowMotionVideoSampleTransformerTest { ...@@ -249,20 +242,20 @@ public class SefSlowMotionVideoSampleTransformerTest {
/** /**
* Returns a list containing the temporal SVC layers of the frames that should be kept according * Returns a list containing the temporal SVC layers of the frames that should be kept according
* to {@link SefSlowMotionVideoSampleTransformer#processCurrentFrame(int, long)}. * to {@link SefSlowMotionFlattener#processCurrentFrame(int, long)}.
* *
* @param sampleTransformer The {@link SefSlowMotionVideoSampleTransformer}. * @param sefSlowMotionFlattener The {@link SefSlowMotionFlattener}.
* @param layerSequence The sequence of layer values in the input. * @param layerSequence The sequence of layer values in the input.
* @param frameCount The number of video frames in the input. * @param frameCount The number of video frames in the input.
* @return The output layers. * @return The output layers.
*/ */
private static List<Integer> getKeptOutputLayers( private static List<Integer> getKeptOutputLayers(
SefSlowMotionVideoSampleTransformer sampleTransformer, int[] layerSequence, int frameCount) { SefSlowMotionFlattener sefSlowMotionFlattener, int[] layerSequence, int frameCount) {
List<Integer> outputLayers = new ArrayList<>(); List<Integer> outputLayers = new ArrayList<>();
for (int i = 0; i < frameCount; i++) { for (int i = 0; i < frameCount; i++) {
int layer = layerSequence[i % layerSequence.length]; int layer = layerSequence[i % layerSequence.length];
long timeUs = i * C.MICROS_PER_SECOND / INPUT_FRAME_RATE; long timeUs = i * C.MICROS_PER_SECOND / INPUT_FRAME_RATE;
if (sampleTransformer.processCurrentFrame(layer, timeUs)) { if (sefSlowMotionFlattener.processCurrentFrame(layer, timeUs)) {
outputLayers.add(layer); outputLayers.add(layer);
} }
} }
...@@ -271,24 +264,24 @@ public class SefSlowMotionVideoSampleTransformerTest { ...@@ -271,24 +264,24 @@ public class SefSlowMotionVideoSampleTransformerTest {
/** /**
* Returns a list containing the frame output times obtained using {@link * Returns a list containing the frame output times obtained using {@link
* SefSlowMotionVideoSampleTransformer#getCurrentFrameOutputTimeUs(long)}. * SefSlowMotionFlattener#getCurrentFrameOutputTimeUs(long)}.
* *
* <p>The output contains the output times for all the input frames, regardless of whether they * <p>The output contains the output times for all the input frames, regardless of whether they
* should be kept or not. * should be kept or not.
* *
* @param sampleTransformer The {@link SefSlowMotionVideoSampleTransformer}. * @param sefSlowMotionFlattener The {@link SefSlowMotionFlattener}.
* @param layerSequence The sequence of layer values in the input. * @param layerSequence The sequence of layer values in the input.
* @param frameCount The number of video frames in the input. * @param frameCount The number of video frames in the input.
* @return The frame output times, in microseconds. * @return The frame output times, in microseconds.
*/ */
private static List<Long> getOutputTimesUs( private static List<Long> getOutputTimesUs(
SefSlowMotionVideoSampleTransformer sampleTransformer, int[] layerSequence, int frameCount) { SefSlowMotionFlattener sefSlowMotionFlattener, int[] layerSequence, int frameCount) {
List<Long> outputTimesUs = new ArrayList<>(); List<Long> outputTimesUs = new ArrayList<>();
for (int i = 0; i < frameCount; i++) { for (int i = 0; i < frameCount; i++) {
int layer = layerSequence[i % layerSequence.length]; int layer = layerSequence[i % layerSequence.length];
long inputTimeUs = i * C.MICROS_PER_SECOND / INPUT_FRAME_RATE; long inputTimeUs = i * C.MICROS_PER_SECOND / INPUT_FRAME_RATE;
sampleTransformer.processCurrentFrame(layer, inputTimeUs); sefSlowMotionFlattener.processCurrentFrame(layer, inputTimeUs);
outputTimesUs.add(sampleTransformer.getCurrentFrameOutputTimeUs(inputTimeUs)); outputTimesUs.add(sefSlowMotionFlattener.getCurrentFrameOutputTimeUs(inputTimeUs));
} }
return outputTimesUs; return outputTimesUs;
} }
......
...@@ -299,6 +299,12 @@ sample: ...@@ -299,6 +299,12 @@ sample:
isKeyFrame = false isKeyFrame = false
presentationTimeUs = 734083 presentationTimeUs = 734083
sample: sample:
trackIndex = 0
dataHashCode = 820561200
size = 1252
isKeyFrame = true
presentationTimeUs = 201521
sample:
trackIndex = 1 trackIndex = 1
dataHashCode = -1554795381 dataHashCode = -1554795381
size = 205 size = 205
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment