Commit f4766ee4 by kimvde Committed by Andrew Lewis

Fix clipping in AudioSamplePipeline

When clipping a MediaItem with start time > 0, the audio was ending
before the video. This is because:
- Audio timestamps are computed based on the sample sizes, with a start
  time set to streamOffsetUs (i.e. the streamStartPositionUs is not
  taken into account).
- The SamplePipeline was subtracting streamStartPositionUs from the
  timestamps before sending the samples to the muxer.
- As a result, the audio timestamps were shifted by
  streamStartPositionUs, while they should be shifted by streamOffsetUs.

PiperOrigin-RevId: 511175923
parent 3009b4d5
...@@ -60,7 +60,6 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -60,7 +60,6 @@ import org.checkerframework.dataflow.qual.Pure;
// TODO(b/260618558): Move silent audio generation upstream of this component. // TODO(b/260618558): Move silent audio generation upstream of this component.
public AudioSamplePipeline( public AudioSamplePipeline(
Format firstInputFormat, Format firstInputFormat,
long streamStartPositionUs,
long streamOffsetUs, long streamOffsetUs,
TransformationRequest transformationRequest, TransformationRequest transformationRequest,
boolean flattenForSlowMotion, boolean flattenForSlowMotion,
...@@ -69,7 +68,7 @@ import org.checkerframework.dataflow.qual.Pure; ...@@ -69,7 +68,7 @@ import org.checkerframework.dataflow.qual.Pure;
MuxerWrapper muxerWrapper, MuxerWrapper muxerWrapper,
FallbackListener fallbackListener) FallbackListener fallbackListener)
throws ExportException { throws ExportException {
super(firstInputFormat, streamStartPositionUs, muxerWrapper); super(firstInputFormat, /* streamStartPositionUs= */ streamOffsetUs, muxerWrapper);
silentAudioGenerator = new SilentAudioGenerator(firstInputFormat); silentAudioGenerator = new SilentAudioGenerator(firstInputFormat);
availableInputBuffers = new ConcurrentLinkedDeque<>(); availableInputBuffers = new ConcurrentLinkedDeque<>();
......
...@@ -196,7 +196,7 @@ import java.util.concurrent.atomic.AtomicInteger; ...@@ -196,7 +196,7 @@ import java.util.concurrent.atomic.AtomicInteger;
compositeAssetLoaderListener.onTrackAdded( compositeAssetLoaderListener.onTrackAdded(
firstAudioFormat, firstAudioFormat,
SUPPORTED_OUTPUT_TYPE_DECODED, SUPPORTED_OUTPUT_TYPE_DECODED,
/* streamStartPositionUs= */ streamOffsetUs, streamStartPositionUs,
streamOffsetUs)); streamOffsetUs));
sampleConsumersByTrackType.put(C.TRACK_TYPE_AUDIO, audioSampleConsumer); sampleConsumersByTrackType.put(C.TRACK_TYPE_AUDIO, audioSampleConsumer);
} }
......
...@@ -434,7 +434,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull; ...@@ -434,7 +434,6 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
if (MimeTypes.isAudio(firstInputFormat.sampleMimeType)) { if (MimeTypes.isAudio(firstInputFormat.sampleMimeType)) {
return new AudioSamplePipeline( return new AudioSamplePipeline(
firstInputFormat, firstInputFormat,
streamStartPositionUs,
streamOffsetUs, streamOffsetUs,
transformationRequest, transformationRequest,
firstEditedMediaItem.flattenForSlowMotion, firstEditedMediaItem.flattenForSlowMotion,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment