Commit 51acd815 by Oliver Woodman Committed by GitHub

Merge pull request #6150 from google/dev-v2-r2.10.3

r2.10.3
parents 1ab402cf 1275217b
Showing with 422 additions and 246 deletions
# Release notes # # Release notes #
### 2.10.3 ###
* Display last frame when seeking to end of stream
([#2568](https://github.com/google/ExoPlayer/issues/2568)).
* Audio:
* Fix an issue where not all audio was played out when the configuration
for the underlying track was changing (e.g., at some period transitions).
* Fix an issue where playback speed was applied inaccurately in playlists
([#6117](https://github.com/google/ExoPlayer/issues/6117)).
* UI: Fix `PlayerView` incorrectly consuming touch events if no controller is
attached ([#6109](https://github.com/google/ExoPlayer/issues/6109)).
* CEA608: Fix repetition of special North American characters
([#6133](https://github.com/google/ExoPlayer/issues/6133)).
* FLV: Fix bug that caused playback of some live streams to not start
([#6111](https://github.com/google/ExoPlayer/issues/6111)).
* SmoothStreaming: Parse text stream `Subtype` into `Format.roleFlags`.
* MediaSession extension: Fix `MediaSessionConnector.play()` not resuming
playback ([#6093](https://github.com/google/ExoPlayer/issues/6093)).
### 2.10.2 ### ### 2.10.2 ###
* Add `ResolvingDataSource` for just-in-time resolution of `DataSpec`s * Add `ResolvingDataSource` for just-in-time resolution of `DataSpec`s
......
...@@ -44,6 +44,7 @@ allprojects { ...@@ -44,6 +44,7 @@ allprojects {
} }
buildDir = "${externalBuildDir}/${project.name}" buildDir = "${externalBuildDir}/${project.name}"
} }
group = 'com.google.android.exoplayer'
} }
apply from: 'javadoc_combined.gradle' apply from: 'javadoc_combined.gradle'
...@@ -13,8 +13,8 @@ ...@@ -13,8 +13,8 @@
// limitations under the License. // limitations under the License.
project.ext { project.ext {
// ExoPlayer version and version code. // ExoPlayer version and version code.
releaseVersion = '2.10.2' releaseVersion = '2.10.3'
releaseVersionCode = 2010002 releaseVersionCode = 2010003
minSdkVersion = 16 minSdkVersion = 16
targetSdkVersion = 28 targetSdkVersion = 28
compileSdkVersion = 28 compileSdkVersion = 28
......
...@@ -306,7 +306,7 @@ public final class TrackSelectionDialog extends DialogFragment { ...@@ -306,7 +306,7 @@ public final class TrackSelectionDialog extends DialogFragment {
} }
} }
/** Fragment to show a track seleciton in tab of the track selection dialog. */ /** Fragment to show a track selection in tab of the track selection dialog. */
public static final class TrackSelectionViewFragment extends Fragment public static final class TrackSelectionViewFragment extends Fragment
implements TrackSelectionView.TrackSelectionListener { implements TrackSelectionView.TrackSelectionListener {
......
...@@ -377,6 +377,13 @@ public final class MediaSessionConnector { ...@@ -377,6 +377,13 @@ public final class MediaSessionConnector {
/** /**
* Gets the {@link MediaMetadataCompat} to be published to the session. * Gets the {@link MediaMetadataCompat} to be published to the session.
* *
* <p>An app may need to load metadata resources like artwork bitmaps asynchronously. In such a
* case the app should return a {@link MediaMetadataCompat} object that does not contain these
* resources as a placeholder. The app should start an asynchronous operation to download the
* bitmap and put it into a cache. Finally, the app should call {@link
* #invalidateMediaSessionMetadata()}. This causes this callback to be called again and the app
* can now return a {@link MediaMetadataCompat} object with all the resources included.
*
* @param player The player connected to the media session. * @param player The player connected to the media session.
* @return The {@link MediaMetadataCompat} to be published to the session. * @return The {@link MediaMetadataCompat} to be published to the session.
*/ */
...@@ -1066,8 +1073,9 @@ public final class MediaSessionConnector { ...@@ -1066,8 +1073,9 @@ public final class MediaSessionConnector {
} }
} else if (player.getPlaybackState() == Player.STATE_ENDED) { } else if (player.getPlaybackState() == Player.STATE_ENDED) {
controlDispatcher.dispatchSeekTo(player, player.getCurrentWindowIndex(), C.TIME_UNSET); controlDispatcher.dispatchSeekTo(player, player.getCurrentWindowIndex(), C.TIME_UNSET);
controlDispatcher.dispatchSetPlayWhenReady(player, /* playWhenReady= */ true);
} }
controlDispatcher.dispatchSetPlayWhenReady(
Assertions.checkNotNull(player), /* playWhenReady= */ true);
} }
} }
......
...@@ -29,11 +29,11 @@ public final class ExoPlayerLibraryInfo { ...@@ -29,11 +29,11 @@ public final class ExoPlayerLibraryInfo {
/** The version of the library expressed as a string, for example "1.2.3". */ /** The version of the library expressed as a string, for example "1.2.3". */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION_INT) or vice versa. // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION_INT) or vice versa.
public static final String VERSION = "2.10.2"; public static final String VERSION = "2.10.3";
/** The version of the library expressed as {@code "ExoPlayerLib/" + VERSION}. */ /** The version of the library expressed as {@code "ExoPlayerLib/" + VERSION}. */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa. // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
public static final String VERSION_SLASHY = "ExoPlayerLib/2.10.2"; public static final String VERSION_SLASHY = "ExoPlayerLib/2.10.3";
/** /**
* The version of the library expressed as an integer, for example 1002003. * The version of the library expressed as an integer, for example 1002003.
...@@ -43,7 +43,7 @@ public final class ExoPlayerLibraryInfo { ...@@ -43,7 +43,7 @@ public final class ExoPlayerLibraryInfo {
* integer version 123045006 (123-045-006). * integer version 123045006 (123-045-006).
*/ */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa. // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
public static final int VERSION_INT = 2010002; public static final int VERSION_INT = 2010003;
/** /**
* Whether the library was compiled with {@link com.google.android.exoplayer2.util.Assertions} * Whether the library was compiled with {@link com.google.android.exoplayer2.util.Assertions}
......
...@@ -1231,8 +1231,7 @@ public class SimpleExoPlayer extends BasePlayer ...@@ -1231,8 +1231,7 @@ public class SimpleExoPlayer extends BasePlayer
Log.w( Log.w(
TAG, TAG,
"Player is accessed on the wrong thread. See " "Player is accessed on the wrong thread. See "
+ "https://exoplayer.dev/troubleshooting.html#" + "https://exoplayer.dev/issues/player-accessed-on-wrong-thread",
+ "what-do-player-is-accessed-on-the-wrong-thread-warnings-mean",
hasNotifiedFullWrongThreadWarning ? null : new IllegalStateException()); hasNotifiedFullWrongThreadWarning ? null : new IllegalStateException());
hasNotifiedFullWrongThreadWarning = true; hasNotifiedFullWrongThreadWarning = true;
} }
......
...@@ -272,6 +272,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -272,6 +272,7 @@ public final class DefaultAudioSink implements AudioSink {
private int preV21OutputBufferOffset; private int preV21OutputBufferOffset;
private int drainingAudioProcessorIndex; private int drainingAudioProcessorIndex;
private boolean handledEndOfStream; private boolean handledEndOfStream;
private boolean stoppedAudioTrack;
private boolean playing; private boolean playing;
private int audioSessionId; private int audioSessionId;
...@@ -465,19 +466,15 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -465,19 +466,15 @@ public final class DefaultAudioSink implements AudioSink {
processingEnabled, processingEnabled,
canApplyPlaybackParameters, canApplyPlaybackParameters,
availableAudioProcessors); availableAudioProcessors);
if (isInitialized()) { // If we have a pending configuration already, we always drain audio processors as the preceding
if (!pendingConfiguration.canReuseAudioTrack(configuration)) { // configuration may have required it (even if this one doesn't).
// We need a new AudioTrack before we can handle more input. We should first stop() the boolean drainAudioProcessors = flushAudioProcessors || this.pendingConfiguration != null;
// track and wait for audio to play out (tracked by [Internal: b/33161961]), but for now we if (isInitialized()
// discard the audio track immediately. && (!pendingConfiguration.canReuseAudioTrack(configuration) || drainAudioProcessors)) {
flush(); this.pendingConfiguration = pendingConfiguration;
} else if (flushAudioProcessors) { } else {
// We don't need a new AudioTrack but audio processors need to be drained and flushed. configuration = pendingConfiguration;
this.pendingConfiguration = pendingConfiguration;
return;
}
} }
configuration = pendingConfiguration;
} }
private void setupAudioProcessors() { private void setupAudioProcessors() {
...@@ -504,7 +501,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -504,7 +501,7 @@ public final class DefaultAudioSink implements AudioSink {
} }
} }
private void initialize() throws InitializationException { private void initialize(long presentationTimeUs) throws InitializationException {
// If we're asynchronously releasing a previous audio track then we block until it has been // If we're asynchronously releasing a previous audio track then we block until it has been
// released. This guarantees that we cannot end up in a state where we have multiple audio // released. This guarantees that we cannot end up in a state where we have multiple audio
// track instances. Without this guarantee it would be possible, in extreme cases, to exhaust // track instances. Without this guarantee it would be possible, in extreme cases, to exhaust
...@@ -536,11 +533,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -536,11 +533,7 @@ public final class DefaultAudioSink implements AudioSink {
} }
} }
playbackParameters = applyPlaybackParameters(playbackParameters, presentationTimeUs);
configuration.canApplyPlaybackParameters
? audioProcessorChain.applyPlaybackParameters(playbackParameters)
: PlaybackParameters.DEFAULT;
setupAudioProcessors();
audioTrackPositionTracker.setAudioTrack( audioTrackPositionTracker.setAudioTrack(
audioTrack, audioTrack,
...@@ -579,21 +572,27 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -579,21 +572,27 @@ public final class DefaultAudioSink implements AudioSink {
Assertions.checkArgument(inputBuffer == null || buffer == inputBuffer); Assertions.checkArgument(inputBuffer == null || buffer == inputBuffer);
if (pendingConfiguration != null) { if (pendingConfiguration != null) {
// We are waiting for audio processors to drain before applying a the new configuration.
if (!drainAudioProcessorsToEndOfStream()) { if (!drainAudioProcessorsToEndOfStream()) {
// There's still pending data in audio processors to write to the track.
return false; return false;
} else if (!pendingConfiguration.canReuseAudioTrack(configuration)) {
playPendingData();
if (hasPendingData()) {
// We're waiting for playout on the current audio track to finish.
return false;
}
flush();
} else {
// The current audio track can be reused for the new configuration.
configuration = pendingConfiguration;
pendingConfiguration = null;
} }
configuration = pendingConfiguration; // Re-apply playback parameters.
pendingConfiguration = null; applyPlaybackParameters(playbackParameters, presentationTimeUs);
playbackParameters =
configuration.canApplyPlaybackParameters
? audioProcessorChain.applyPlaybackParameters(playbackParameters)
: PlaybackParameters.DEFAULT;
setupAudioProcessors();
} }
if (!isInitialized()) { if (!isInitialized()) {
initialize(); initialize(presentationTimeUs);
if (playing) { if (playing) {
play(); play();
} }
...@@ -629,15 +628,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -629,15 +628,7 @@ public final class DefaultAudioSink implements AudioSink {
} }
PlaybackParameters newPlaybackParameters = afterDrainPlaybackParameters; PlaybackParameters newPlaybackParameters = afterDrainPlaybackParameters;
afterDrainPlaybackParameters = null; afterDrainPlaybackParameters = null;
newPlaybackParameters = audioProcessorChain.applyPlaybackParameters(newPlaybackParameters); applyPlaybackParameters(newPlaybackParameters, presentationTimeUs);
// Store the position and corresponding media time from which the parameters will apply.
playbackParametersCheckpoints.add(
new PlaybackParametersCheckpoint(
newPlaybackParameters,
Math.max(0, presentationTimeUs),
configuration.framesToDurationUs(getWrittenFrames())));
// Update the set of active audio processors to take into account the new parameters.
setupAudioProcessors();
} }
if (startMediaTimeState == START_NOT_SET) { if (startMediaTimeState == START_NOT_SET) {
...@@ -786,15 +777,8 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -786,15 +777,8 @@ public final class DefaultAudioSink implements AudioSink {
@Override @Override
public void playToEndOfStream() throws WriteException { public void playToEndOfStream() throws WriteException {
if (handledEndOfStream || !isInitialized()) { if (!handledEndOfStream && isInitialized() && drainAudioProcessorsToEndOfStream()) {
return; playPendingData();
}
if (drainAudioProcessorsToEndOfStream()) {
// The audio processors have drained, so drain the underlying audio track.
audioTrackPositionTracker.handleEndOfStream(getWrittenFrames());
audioTrack.stop();
bytesUntilNextAvSync = 0;
handledEndOfStream = true; handledEndOfStream = true;
} }
} }
...@@ -858,8 +842,9 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -858,8 +842,9 @@ public final class DefaultAudioSink implements AudioSink {
// parameters apply. // parameters apply.
afterDrainPlaybackParameters = playbackParameters; afterDrainPlaybackParameters = playbackParameters;
} else { } else {
// Update the playback parameters now. // Update the playback parameters now. They will be applied to the audio processors during
this.playbackParameters = audioProcessorChain.applyPlaybackParameters(playbackParameters); // initialization.
this.playbackParameters = playbackParameters;
} }
} }
return this.playbackParameters; return this.playbackParameters;
...@@ -976,6 +961,7 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -976,6 +961,7 @@ public final class DefaultAudioSink implements AudioSink {
flushAudioProcessors(); flushAudioProcessors();
inputBuffer = null; inputBuffer = null;
outputBuffer = null; outputBuffer = null;
stoppedAudioTrack = false;
handledEndOfStream = false; handledEndOfStream = false;
drainingAudioProcessorIndex = C.INDEX_UNSET; drainingAudioProcessorIndex = C.INDEX_UNSET;
avSyncHeader = null; avSyncHeader = null;
...@@ -1040,6 +1026,21 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -1040,6 +1026,21 @@ public final class DefaultAudioSink implements AudioSink {
}.start(); }.start();
} }
private void applyPlaybackParameters(
PlaybackParameters playbackParameters, long presentationTimeUs) {
PlaybackParameters newPlaybackParameters =
configuration.canApplyPlaybackParameters
? audioProcessorChain.applyPlaybackParameters(playbackParameters)
: PlaybackParameters.DEFAULT;
// Store the position and corresponding media time from which the parameters will apply.
playbackParametersCheckpoints.add(
new PlaybackParametersCheckpoint(
newPlaybackParameters,
/* mediaTimeUs= */ Math.max(0, presentationTimeUs),
/* positionUs= */ configuration.framesToDurationUs(getWrittenFrames())));
setupAudioProcessors();
}
private long applySpeedup(long positionUs) { private long applySpeedup(long positionUs) {
@Nullable PlaybackParametersCheckpoint checkpoint = null; @Nullable PlaybackParametersCheckpoint checkpoint = null;
while (!playbackParametersCheckpoints.isEmpty() while (!playbackParametersCheckpoints.isEmpty()
...@@ -1223,6 +1224,15 @@ public final class DefaultAudioSink implements AudioSink { ...@@ -1223,6 +1224,15 @@ public final class DefaultAudioSink implements AudioSink {
audioTrack.setStereoVolume(volume, volume); audioTrack.setStereoVolume(volume, volume);
} }
private void playPendingData() {
if (!stoppedAudioTrack) {
stoppedAudioTrack = true;
audioTrackPositionTracker.handleEndOfStream(getWrittenFrames());
audioTrack.stop();
bytesUntilNextAvSync = 0;
}
}
/** Stores playback parameters with the position and media time at which they apply. */ /** Stores playback parameters with the position and media time at which they apply. */
private static final class PlaybackParametersCheckpoint { private static final class PlaybackParametersCheckpoint {
......
...@@ -695,7 +695,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media ...@@ -695,7 +695,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
int bufferIndex, int bufferIndex,
int bufferFlags, int bufferFlags,
long bufferPresentationTimeUs, long bufferPresentationTimeUs,
boolean shouldSkip, boolean isDecodeOnlyBuffer,
boolean isLastBuffer,
Format format) Format format)
throws ExoPlaybackException { throws ExoPlaybackException {
if (codecNeedsEosBufferTimestampWorkaround if (codecNeedsEosBufferTimestampWorkaround
...@@ -711,7 +712,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media ...@@ -711,7 +712,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
return true; return true;
} }
if (shouldSkip) { if (isDecodeOnlyBuffer) {
codec.releaseOutputBuffer(bufferIndex, false); codec.releaseOutputBuffer(bufferIndex, false);
decoderCounters.skippedOutputBufferCount++; decoderCounters.skippedOutputBufferCount++;
audioSink.handleDiscontinuity(); audioSink.handleDiscontinuity();
......
...@@ -544,7 +544,7 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe ...@@ -544,7 +544,7 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
@Override @Override
public void onEvent( public void onEvent(
ExoMediaDrm<? extends T> md, ExoMediaDrm<? extends T> md,
byte[] sessionId, @Nullable byte[] sessionId,
int event, int event,
int extra, int extra,
@Nullable byte[] data) { @Nullable byte[] data) {
......
...@@ -80,7 +80,7 @@ public interface ExoMediaDrm<T extends ExoMediaCrypto> { ...@@ -80,7 +80,7 @@ public interface ExoMediaDrm<T extends ExoMediaCrypto> {
*/ */
void onEvent( void onEvent(
ExoMediaDrm<? extends T> mediaDrm, ExoMediaDrm<? extends T> mediaDrm,
byte[] sessionId, @Nullable byte[] sessionId,
int event, int event,
int extra, int extra,
@Nullable byte[] data); @Nullable byte[] data);
...@@ -215,6 +215,7 @@ public interface ExoMediaDrm<T extends ExoMediaCrypto> { ...@@ -215,6 +215,7 @@ public interface ExoMediaDrm<T extends ExoMediaCrypto> {
throws NotProvisionedException; throws NotProvisionedException;
/** @see MediaDrm#provideKeyResponse(byte[], byte[]) */ /** @see MediaDrm#provideKeyResponse(byte[], byte[]) */
@Nullable
byte[] provideKeyResponse(byte[] scope, byte[] response) byte[] provideKeyResponse(byte[] scope, byte[] response)
throws NotProvisionedException, DeniedByServerException; throws NotProvisionedException, DeniedByServerException;
......
...@@ -84,8 +84,6 @@ public final class FrameworkMediaDrm implements ExoMediaDrm<FrameworkMediaCrypto ...@@ -84,8 +84,6 @@ public final class FrameworkMediaDrm implements ExoMediaDrm<FrameworkMediaCrypto
} }
} }
// FIXME: incompatible types in argument.
@SuppressWarnings("nullness:argument.type.incompatible")
@Override @Override
public void setOnEventListener( public void setOnEventListener(
final ExoMediaDrm.OnEventListener<? super FrameworkMediaCrypto> listener) { final ExoMediaDrm.OnEventListener<? super FrameworkMediaCrypto> listener) {
...@@ -160,8 +158,7 @@ public final class FrameworkMediaDrm implements ExoMediaDrm<FrameworkMediaCrypto ...@@ -160,8 +158,7 @@ public final class FrameworkMediaDrm implements ExoMediaDrm<FrameworkMediaCrypto
return new KeyRequest(requestData, licenseServerUrl); return new KeyRequest(requestData, licenseServerUrl);
} }
// FIXME: incompatible types in return. @Nullable
@SuppressWarnings("nullness:return.type.incompatible")
@Override @Override
public byte[] provideKeyResponse(byte[] scope, byte[] response) public byte[] provideKeyResponse(byte[] scope, byte[] response)
throws NotProvisionedException, DeniedByServerException { throws NotProvisionedException, DeniedByServerException {
......
...@@ -86,11 +86,12 @@ import java.util.Collections; ...@@ -86,11 +86,12 @@ import java.util.Collections;
} }
@Override @Override
protected void parsePayload(ParsableByteArray data, long timeUs) throws ParserException { protected boolean parsePayload(ParsableByteArray data, long timeUs) throws ParserException {
if (audioFormat == AUDIO_FORMAT_MP3) { if (audioFormat == AUDIO_FORMAT_MP3) {
int sampleSize = data.bytesLeft(); int sampleSize = data.bytesLeft();
output.sampleData(data, sampleSize); output.sampleData(data, sampleSize);
output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null);
return true;
} else { } else {
int packetType = data.readUnsignedByte(); int packetType = data.readUnsignedByte();
if (packetType == AAC_PACKET_TYPE_SEQUENCE_HEADER && !hasOutputFormat) { if (packetType == AAC_PACKET_TYPE_SEQUENCE_HEADER && !hasOutputFormat) {
...@@ -104,12 +105,15 @@ import java.util.Collections; ...@@ -104,12 +105,15 @@ import java.util.Collections;
Collections.singletonList(audioSpecificConfig), null, 0, null); Collections.singletonList(audioSpecificConfig), null, 0, null);
output.format(format); output.format(format);
hasOutputFormat = true; hasOutputFormat = true;
return false;
} else if (audioFormat != AUDIO_FORMAT_AAC || packetType == AAC_PACKET_TYPE_AAC_RAW) { } else if (audioFormat != AUDIO_FORMAT_AAC || packetType == AAC_PACKET_TYPE_AAC_RAW) {
int sampleSize = data.bytesLeft(); int sampleSize = data.bytesLeft();
output.sampleData(data, sampleSize); output.sampleData(data, sampleSize);
output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null);
return true;
} else {
return false;
} }
} }
} }
} }
...@@ -74,6 +74,7 @@ public final class FlvExtractor implements Extractor { ...@@ -74,6 +74,7 @@ public final class FlvExtractor implements Extractor {
private ExtractorOutput extractorOutput; private ExtractorOutput extractorOutput;
private @States int state; private @States int state;
private boolean outputFirstSample;
private long mediaTagTimestampOffsetUs; private long mediaTagTimestampOffsetUs;
private int bytesToNextTagHeader; private int bytesToNextTagHeader;
private int tagType; private int tagType;
...@@ -90,7 +91,6 @@ public final class FlvExtractor implements Extractor { ...@@ -90,7 +91,6 @@ public final class FlvExtractor implements Extractor {
tagData = new ParsableByteArray(); tagData = new ParsableByteArray();
metadataReader = new ScriptTagPayloadReader(); metadataReader = new ScriptTagPayloadReader();
state = STATE_READING_FLV_HEADER; state = STATE_READING_FLV_HEADER;
mediaTagTimestampOffsetUs = C.TIME_UNSET;
} }
@Override @Override
...@@ -132,7 +132,7 @@ public final class FlvExtractor implements Extractor { ...@@ -132,7 +132,7 @@ public final class FlvExtractor implements Extractor {
@Override @Override
public void seek(long position, long timeUs) { public void seek(long position, long timeUs) {
state = STATE_READING_FLV_HEADER; state = STATE_READING_FLV_HEADER;
mediaTagTimestampOffsetUs = C.TIME_UNSET; outputFirstSample = false;
bytesToNextTagHeader = 0; bytesToNextTagHeader = 0;
} }
...@@ -253,14 +253,16 @@ public final class FlvExtractor implements Extractor { ...@@ -253,14 +253,16 @@ public final class FlvExtractor implements Extractor {
*/ */
private boolean readTagData(ExtractorInput input) throws IOException, InterruptedException { private boolean readTagData(ExtractorInput input) throws IOException, InterruptedException {
boolean wasConsumed = true; boolean wasConsumed = true;
boolean wasSampleOutput = false;
long timestampUs = getCurrentTimestampUs();
if (tagType == TAG_TYPE_AUDIO && audioReader != null) { if (tagType == TAG_TYPE_AUDIO && audioReader != null) {
ensureReadyForMediaOutput(); ensureReadyForMediaOutput();
audioReader.consume(prepareTagData(input), mediaTagTimestampOffsetUs + tagTimestampUs); wasSampleOutput = audioReader.consume(prepareTagData(input), timestampUs);
} else if (tagType == TAG_TYPE_VIDEO && videoReader != null) { } else if (tagType == TAG_TYPE_VIDEO && videoReader != null) {
ensureReadyForMediaOutput(); ensureReadyForMediaOutput();
videoReader.consume(prepareTagData(input), mediaTagTimestampOffsetUs + tagTimestampUs); wasSampleOutput = videoReader.consume(prepareTagData(input), timestampUs);
} else if (tagType == TAG_TYPE_SCRIPT_DATA && !outputSeekMap) { } else if (tagType == TAG_TYPE_SCRIPT_DATA && !outputSeekMap) {
metadataReader.consume(prepareTagData(input), tagTimestampUs); wasSampleOutput = metadataReader.consume(prepareTagData(input), timestampUs);
long durationUs = metadataReader.getDurationUs(); long durationUs = metadataReader.getDurationUs();
if (durationUs != C.TIME_UNSET) { if (durationUs != C.TIME_UNSET) {
extractorOutput.seekMap(new SeekMap.Unseekable(durationUs)); extractorOutput.seekMap(new SeekMap.Unseekable(durationUs));
...@@ -270,6 +272,11 @@ public final class FlvExtractor implements Extractor { ...@@ -270,6 +272,11 @@ public final class FlvExtractor implements Extractor {
input.skipFully(tagDataSize); input.skipFully(tagDataSize);
wasConsumed = false; wasConsumed = false;
} }
if (!outputFirstSample && wasSampleOutput) {
outputFirstSample = true;
mediaTagTimestampOffsetUs =
metadataReader.getDurationUs() == C.TIME_UNSET ? -tagTimestampUs : 0;
}
bytesToNextTagHeader = 4; // There's a 4 byte previous tag size before the next header. bytesToNextTagHeader = 4; // There's a 4 byte previous tag size before the next header.
state = STATE_SKIPPING_TO_TAG_HEADER; state = STATE_SKIPPING_TO_TAG_HEADER;
return wasConsumed; return wasConsumed;
...@@ -292,10 +299,11 @@ public final class FlvExtractor implements Extractor { ...@@ -292,10 +299,11 @@ public final class FlvExtractor implements Extractor {
extractorOutput.seekMap(new SeekMap.Unseekable(C.TIME_UNSET)); extractorOutput.seekMap(new SeekMap.Unseekable(C.TIME_UNSET));
outputSeekMap = true; outputSeekMap = true;
} }
if (mediaTagTimestampOffsetUs == C.TIME_UNSET) {
mediaTagTimestampOffsetUs =
metadataReader.getDurationUs() == C.TIME_UNSET ? -tagTimestampUs : 0;
}
} }
private long getCurrentTimestampUs() {
return outputFirstSample
? (mediaTagTimestampOffsetUs + tagTimestampUs)
: (metadataReader.getDurationUs() == C.TIME_UNSET ? 0 : tagTimestampUs);
}
} }
...@@ -63,7 +63,7 @@ import java.util.Map; ...@@ -63,7 +63,7 @@ import java.util.Map;
} }
@Override @Override
protected void parsePayload(ParsableByteArray data, long timeUs) throws ParserException { protected boolean parsePayload(ParsableByteArray data, long timeUs) throws ParserException {
int nameType = readAmfType(data); int nameType = readAmfType(data);
if (nameType != AMF_TYPE_STRING) { if (nameType != AMF_TYPE_STRING) {
// Should never happen. // Should never happen.
...@@ -72,12 +72,12 @@ import java.util.Map; ...@@ -72,12 +72,12 @@ import java.util.Map;
String name = readAmfString(data); String name = readAmfString(data);
if (!NAME_METADATA.equals(name)) { if (!NAME_METADATA.equals(name)) {
// We're only interested in metadata. // We're only interested in metadata.
return; return false;
} }
int type = readAmfType(data); int type = readAmfType(data);
if (type != AMF_TYPE_ECMA_ARRAY) { if (type != AMF_TYPE_ECMA_ARRAY) {
// We're not interested in this metadata. // We're not interested in this metadata.
return; return false;
} }
// Set the duration to the value contained in the metadata, if present. // Set the duration to the value contained in the metadata, if present.
Map<String, Object> metadata = readAmfEcmaArray(data); Map<String, Object> metadata = readAmfEcmaArray(data);
...@@ -87,6 +87,7 @@ import java.util.Map; ...@@ -87,6 +87,7 @@ import java.util.Map;
durationUs = (long) (durationSeconds * C.MICROS_PER_SECOND); durationUs = (long) (durationSeconds * C.MICROS_PER_SECOND);
} }
} }
return false;
} }
private static int readAmfType(ParsableByteArray data) { private static int readAmfType(ParsableByteArray data) {
......
...@@ -58,12 +58,11 @@ import com.google.android.exoplayer2.util.ParsableByteArray; ...@@ -58,12 +58,11 @@ import com.google.android.exoplayer2.util.ParsableByteArray;
* *
* @param data The payload data to consume. * @param data The payload data to consume.
* @param timeUs The timestamp associated with the payload. * @param timeUs The timestamp associated with the payload.
* @return Whether a sample was output.
* @throws ParserException If an error occurs parsing the data. * @throws ParserException If an error occurs parsing the data.
*/ */
public final void consume(ParsableByteArray data, long timeUs) throws ParserException { public final boolean consume(ParsableByteArray data, long timeUs) throws ParserException {
if (parseHeader(data)) { return parseHeader(data) && parsePayload(data, timeUs);
parsePayload(data, timeUs);
}
} }
/** /**
...@@ -78,10 +77,11 @@ import com.google.android.exoplayer2.util.ParsableByteArray; ...@@ -78,10 +77,11 @@ import com.google.android.exoplayer2.util.ParsableByteArray;
/** /**
* Parses tag payload. * Parses tag payload.
* *
* @param data Buffer where tag payload is stored * @param data Buffer where tag payload is stored.
* @param timeUs Time position of the frame * @param timeUs Time position of the frame.
* @return Whether a sample was output.
* @throws ParserException If an error occurs parsing the payload. * @throws ParserException If an error occurs parsing the payload.
*/ */
protected abstract void parsePayload(ParsableByteArray data, long timeUs) throws ParserException; protected abstract boolean parsePayload(ParsableByteArray data, long timeUs)
throws ParserException;
} }
...@@ -47,6 +47,7 @@ import com.google.android.exoplayer2.video.AvcConfig; ...@@ -47,6 +47,7 @@ import com.google.android.exoplayer2.video.AvcConfig;
// State variables. // State variables.
private boolean hasOutputFormat; private boolean hasOutputFormat;
private boolean hasOutputKeyframe;
private int frameType; private int frameType;
/** /**
...@@ -60,7 +61,7 @@ import com.google.android.exoplayer2.video.AvcConfig; ...@@ -60,7 +61,7 @@ import com.google.android.exoplayer2.video.AvcConfig;
@Override @Override
public void seek() { public void seek() {
// Do nothing. hasOutputKeyframe = false;
} }
@Override @Override
...@@ -77,7 +78,7 @@ import com.google.android.exoplayer2.video.AvcConfig; ...@@ -77,7 +78,7 @@ import com.google.android.exoplayer2.video.AvcConfig;
} }
@Override @Override
protected void parsePayload(ParsableByteArray data, long timeUs) throws ParserException { protected boolean parsePayload(ParsableByteArray data, long timeUs) throws ParserException {
int packetType = data.readUnsignedByte(); int packetType = data.readUnsignedByte();
int compositionTimeMs = data.readInt24(); int compositionTimeMs = data.readInt24();
...@@ -94,7 +95,12 @@ import com.google.android.exoplayer2.video.AvcConfig; ...@@ -94,7 +95,12 @@ import com.google.android.exoplayer2.video.AvcConfig;
avcConfig.initializationData, Format.NO_VALUE, avcConfig.pixelWidthAspectRatio, null); avcConfig.initializationData, Format.NO_VALUE, avcConfig.pixelWidthAspectRatio, null);
output.format(format); output.format(format);
hasOutputFormat = true; hasOutputFormat = true;
return false;
} else if (packetType == AVC_PACKET_TYPE_AVC_NALU && hasOutputFormat) { } else if (packetType == AVC_PACKET_TYPE_AVC_NALU && hasOutputFormat) {
boolean isKeyframe = frameType == VIDEO_FRAME_KEYFRAME;
if (!hasOutputKeyframe && !isKeyframe) {
return false;
}
// TODO: Deduplicate with Mp4Extractor. // TODO: Deduplicate with Mp4Extractor.
// Zero the top three bytes of the array that we'll use to decode nal unit lengths, in case // Zero the top three bytes of the array that we'll use to decode nal unit lengths, in case
// they're only 1 or 2 bytes long. // they're only 1 or 2 bytes long.
...@@ -123,8 +129,12 @@ import com.google.android.exoplayer2.video.AvcConfig; ...@@ -123,8 +129,12 @@ import com.google.android.exoplayer2.video.AvcConfig;
output.sampleData(data, bytesToWrite); output.sampleData(data, bytesToWrite);
bytesWritten += bytesToWrite; bytesWritten += bytesToWrite;
} }
output.sampleMetadata(timeUs, frameType == VIDEO_FRAME_KEYFRAME ? C.BUFFER_FLAG_KEY_FRAME : 0, output.sampleMetadata(
bytesWritten, 0, null); timeUs, isKeyframe ? C.BUFFER_FLAG_KEY_FRAME : 0, bytesWritten, 0, null);
hasOutputKeyframe = true;
return true;
} else {
return false;
} }
} }
......
...@@ -518,9 +518,15 @@ public final class MediaCodecInfo { ...@@ -518,9 +518,15 @@ public final class MediaCodecInfo {
@TargetApi(21) @TargetApi(21)
private static boolean areSizeAndRateSupportedV21(VideoCapabilities capabilities, int width, private static boolean areSizeAndRateSupportedV21(VideoCapabilities capabilities, int width,
int height, double frameRate) { int height, double frameRate) {
return frameRate == Format.NO_VALUE || frameRate <= 0 if (frameRate == Format.NO_VALUE || frameRate <= 0) {
? capabilities.isSizeSupported(width, height) return capabilities.isSizeSupported(width, height);
: capabilities.areSizeAndRateSupported(width, height, frameRate); } else {
// The signaled frame rate may be slightly higher than the actual frame rate, so we take the
// floor to avoid situations where a range check in areSizeAndRateSupported fails due to
// slightly exceeding the limits for a standard format (e.g., 1080p at 30 fps).
double floorFrameRate = Math.floor(frameRate);
return capabilities.areSizeAndRateSupported(width, height, floorFrameRate);
}
} }
@TargetApi(23) @TargetApi(23)
......
...@@ -328,14 +328,16 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -328,14 +328,16 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
private int inputIndex; private int inputIndex;
private int outputIndex; private int outputIndex;
private ByteBuffer outputBuffer; private ByteBuffer outputBuffer;
private boolean shouldSkipOutputBuffer; private boolean isDecodeOnlyOutputBuffer;
private boolean isLastOutputBuffer;
private boolean codecReconfigured; private boolean codecReconfigured;
@ReconfigurationState private int codecReconfigurationState; @ReconfigurationState private int codecReconfigurationState;
@DrainState private int codecDrainState; @DrainState private int codecDrainState;
@DrainAction private int codecDrainAction; @DrainAction private int codecDrainAction;
private boolean codecReceivedBuffers; private boolean codecReceivedBuffers;
private boolean codecReceivedEos; private boolean codecReceivedEos;
private long lastBufferInStreamPresentationTimeUs;
private long largestQueuedPresentationTimeUs;
private boolean inputStreamEnded; private boolean inputStreamEnded;
private boolean outputStreamEnded; private boolean outputStreamEnded;
private boolean waitingForKeys; private boolean waitingForKeys;
...@@ -598,6 +600,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -598,6 +600,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
waitingForKeys = false; waitingForKeys = false;
codecHotswapDeadlineMs = C.TIME_UNSET; codecHotswapDeadlineMs = C.TIME_UNSET;
decodeOnlyPresentationTimestamps.clear(); decodeOnlyPresentationTimestamps.clear();
largestQueuedPresentationTimeUs = C.TIME_UNSET;
lastBufferInStreamPresentationTimeUs = C.TIME_UNSET;
try { try {
if (codec != null) { if (codec != null) {
decoderCounters.decoderReleaseCount++; decoderCounters.decoderReleaseCount++;
...@@ -704,10 +708,13 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -704,10 +708,13 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
waitingForFirstSyncSample = true; waitingForFirstSyncSample = true;
codecNeedsAdaptationWorkaroundBuffer = false; codecNeedsAdaptationWorkaroundBuffer = false;
shouldSkipAdaptationWorkaroundOutputBuffer = false; shouldSkipAdaptationWorkaroundOutputBuffer = false;
shouldSkipOutputBuffer = false; isDecodeOnlyOutputBuffer = false;
isLastOutputBuffer = false;
waitingForKeys = false; waitingForKeys = false;
decodeOnlyPresentationTimestamps.clear(); decodeOnlyPresentationTimestamps.clear();
largestQueuedPresentationTimeUs = C.TIME_UNSET;
lastBufferInStreamPresentationTimeUs = C.TIME_UNSET;
codecDrainState = DRAIN_STATE_NONE; codecDrainState = DRAIN_STATE_NONE;
codecDrainAction = DRAIN_ACTION_NONE; codecDrainAction = DRAIN_ACTION_NONE;
// Reconfiguration data sent shortly before the flush may not have been processed by the // Reconfiguration data sent shortly before the flush may not have been processed by the
...@@ -881,7 +888,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -881,7 +888,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
codecDrainAction = DRAIN_ACTION_NONE; codecDrainAction = DRAIN_ACTION_NONE;
codecNeedsAdaptationWorkaroundBuffer = false; codecNeedsAdaptationWorkaroundBuffer = false;
shouldSkipAdaptationWorkaroundOutputBuffer = false; shouldSkipAdaptationWorkaroundOutputBuffer = false;
shouldSkipOutputBuffer = false; isDecodeOnlyOutputBuffer = false;
isLastOutputBuffer = false;
waitingForFirstSyncSample = true; waitingForFirstSyncSample = true;
decoderCounters.decoderInitCount++; decoderCounters.decoderInitCount++;
...@@ -1016,6 +1024,11 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -1016,6 +1024,11 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
result = readSource(formatHolder, buffer, false); result = readSource(formatHolder, buffer, false);
} }
if (hasReadStreamToEnd()) {
// Notify output queue of the last buffer's timestamp.
lastBufferInStreamPresentationTimeUs = largestQueuedPresentationTimeUs;
}
if (result == C.RESULT_NOTHING_READ) { if (result == C.RESULT_NOTHING_READ) {
return false; return false;
} }
...@@ -1088,6 +1101,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -1088,6 +1101,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
formatQueue.add(presentationTimeUs, inputFormat); formatQueue.add(presentationTimeUs, inputFormat);
waitingForFirstSampleInFormat = false; waitingForFirstSampleInFormat = false;
} }
largestQueuedPresentationTimeUs =
Math.max(largestQueuedPresentationTimeUs, presentationTimeUs);
buffer.flip(); buffer.flip();
onQueueInputBuffer(buffer); onQueueInputBuffer(buffer);
...@@ -1458,7 +1473,9 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -1458,7 +1473,9 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
outputBuffer.position(outputBufferInfo.offset); outputBuffer.position(outputBufferInfo.offset);
outputBuffer.limit(outputBufferInfo.offset + outputBufferInfo.size); outputBuffer.limit(outputBufferInfo.offset + outputBufferInfo.size);
} }
shouldSkipOutputBuffer = shouldSkipOutputBuffer(outputBufferInfo.presentationTimeUs); isDecodeOnlyOutputBuffer = isDecodeOnlyBuffer(outputBufferInfo.presentationTimeUs);
isLastOutputBuffer =
lastBufferInStreamPresentationTimeUs == outputBufferInfo.presentationTimeUs;
updateOutputFormatForTime(outputBufferInfo.presentationTimeUs); updateOutputFormatForTime(outputBufferInfo.presentationTimeUs);
} }
...@@ -1474,7 +1491,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -1474,7 +1491,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
outputIndex, outputIndex,
outputBufferInfo.flags, outputBufferInfo.flags,
outputBufferInfo.presentationTimeUs, outputBufferInfo.presentationTimeUs,
shouldSkipOutputBuffer, isDecodeOnlyOutputBuffer,
isLastOutputBuffer,
outputFormat); outputFormat);
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
processEndOfStream(); processEndOfStream();
...@@ -1494,7 +1512,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -1494,7 +1512,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
outputIndex, outputIndex,
outputBufferInfo.flags, outputBufferInfo.flags,
outputBufferInfo.presentationTimeUs, outputBufferInfo.presentationTimeUs,
shouldSkipOutputBuffer, isDecodeOnlyOutputBuffer,
isLastOutputBuffer,
outputFormat); outputFormat);
} }
...@@ -1561,7 +1580,9 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -1561,7 +1580,9 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
* @param bufferIndex The index of the output buffer. * @param bufferIndex The index of the output buffer.
* @param bufferFlags The flags attached to the output buffer. * @param bufferFlags The flags attached to the output buffer.
* @param bufferPresentationTimeUs The presentation time of the output buffer in microseconds. * @param bufferPresentationTimeUs The presentation time of the output buffer in microseconds.
* @param shouldSkip Whether the buffer should be skipped (i.e. not rendered). * @param isDecodeOnlyBuffer Whether the buffer was marked with {@link C#BUFFER_FLAG_DECODE_ONLY}
* by the source.
* @param isLastBuffer Whether the buffer is the last sample of the current stream.
* @param format The format associated with the buffer. * @param format The format associated with the buffer.
* @return Whether the output buffer was fully processed (e.g. rendered or skipped). * @return Whether the output buffer was fully processed (e.g. rendered or skipped).
* @throws ExoPlaybackException If an error occurs processing the output buffer. * @throws ExoPlaybackException If an error occurs processing the output buffer.
...@@ -1574,7 +1595,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -1574,7 +1595,8 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
int bufferIndex, int bufferIndex,
int bufferFlags, int bufferFlags,
long bufferPresentationTimeUs, long bufferPresentationTimeUs,
boolean shouldSkip, boolean isDecodeOnlyBuffer,
boolean isLastBuffer,
Format format) Format format)
throws ExoPlaybackException; throws ExoPlaybackException;
...@@ -1654,7 +1676,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer { ...@@ -1654,7 +1676,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
codecDrainAction = DRAIN_ACTION_NONE; codecDrainAction = DRAIN_ACTION_NONE;
} }
private boolean shouldSkipOutputBuffer(long presentationTimeUs) { private boolean isDecodeOnlyBuffer(long presentationTimeUs) {
// We avoid using decodeOnlyPresentationTimestamps.remove(presentationTimeUs) because it would // We avoid using decodeOnlyPresentationTimestamps.remove(presentationTimeUs) because it would
// box presentationTimeUs, creating a Long object that would need to be garbage collected. // box presentationTimeUs, creating a Long object that would need to be garbage collected.
int size = decodeOnlyPresentationTimestamps.size(); int size = decodeOnlyPresentationTimestamps.size();
......
...@@ -817,10 +817,10 @@ public final class DownloadHelper { ...@@ -817,10 +817,10 @@ public final class DownloadHelper {
private final MediaSource mediaSource; private final MediaSource mediaSource;
private final DownloadHelper downloadHelper; private final DownloadHelper downloadHelper;
private final Allocator allocator; private final Allocator allocator;
private final ArrayList<MediaPeriod> pendingMediaPeriods;
private final Handler downloadHelperHandler;
private final HandlerThread mediaSourceThread; private final HandlerThread mediaSourceThread;
private final Handler mediaSourceHandler; private final Handler mediaSourceHandler;
private final Handler downloadHelperHandler;
private final ArrayList<MediaPeriod> pendingMediaPeriods;
@Nullable public Object manifest; @Nullable public Object manifest;
public @MonotonicNonNull Timeline timeline; public @MonotonicNonNull Timeline timeline;
...@@ -832,6 +832,7 @@ public final class DownloadHelper { ...@@ -832,6 +832,7 @@ public final class DownloadHelper {
this.mediaSource = mediaSource; this.mediaSource = mediaSource;
this.downloadHelper = downloadHelper; this.downloadHelper = downloadHelper;
allocator = new DefaultAllocator(true, C.DEFAULT_BUFFER_SEGMENT_SIZE); allocator = new DefaultAllocator(true, C.DEFAULT_BUFFER_SEGMENT_SIZE);
pendingMediaPeriods = new ArrayList<>();
@SuppressWarnings("methodref.receiver.bound.invalid") @SuppressWarnings("methodref.receiver.bound.invalid")
Handler downloadThreadHandler = Util.createHandler(this::handleDownloadHelperCallbackMessage); Handler downloadThreadHandler = Util.createHandler(this::handleDownloadHelperCallbackMessage);
this.downloadHelperHandler = downloadThreadHandler; this.downloadHelperHandler = downloadThreadHandler;
...@@ -839,7 +840,6 @@ public final class DownloadHelper { ...@@ -839,7 +840,6 @@ public final class DownloadHelper {
mediaSourceThread.start(); mediaSourceThread.start();
mediaSourceHandler = Util.createHandler(mediaSourceThread.getLooper(), /* callback= */ this); mediaSourceHandler = Util.createHandler(mediaSourceThread.getLooper(), /* callback= */ this);
mediaSourceHandler.sendEmptyMessage(MESSAGE_PREPARE_SOURCE); mediaSourceHandler.sendEmptyMessage(MESSAGE_PREPARE_SOURCE);
pendingMediaPeriods = new ArrayList<>();
} }
public void release() { public void release() {
......
...@@ -25,6 +25,7 @@ import android.content.Context; ...@@ -25,6 +25,7 @@ import android.content.Context;
import android.content.Intent; import android.content.Intent;
import android.os.PersistableBundle; import android.os.PersistableBundle;
import androidx.annotation.RequiresPermission; import androidx.annotation.RequiresPermission;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
...@@ -129,9 +130,8 @@ public final class PlatformScheduler implements Scheduler { ...@@ -129,9 +130,8 @@ public final class PlatformScheduler implements Scheduler {
logd("Requirements are met"); logd("Requirements are met");
String serviceAction = extras.getString(KEY_SERVICE_ACTION); String serviceAction = extras.getString(KEY_SERVICE_ACTION);
String servicePackage = extras.getString(KEY_SERVICE_PACKAGE); String servicePackage = extras.getString(KEY_SERVICE_PACKAGE);
// FIXME: incompatible types in argument. Intent intent =
@SuppressWarnings("nullness:argument.type.incompatible") new Intent(Assertions.checkNotNull(serviceAction)).setPackage(servicePackage);
Intent intent = new Intent(serviceAction).setPackage(servicePackage);
logd("Starting service action: " + serviceAction + " package: " + servicePackage); logd("Starting service action: " + serviceAction + " package: " + servicePackage);
Util.startForegroundService(this, intent); Util.startForegroundService(this, intent);
} else { } else {
......
...@@ -733,7 +733,7 @@ import org.checkerframework.checker.nullness.compatqual.NullableType; ...@@ -733,7 +733,7 @@ import org.checkerframework.checker.nullness.compatqual.NullableType;
if (prepared) { if (prepared) {
SeekMap seekMap = getPreparedState().seekMap; SeekMap seekMap = getPreparedState().seekMap;
Assertions.checkState(isPendingReset()); Assertions.checkState(isPendingReset());
if (durationUs != C.TIME_UNSET && pendingResetPositionUs >= durationUs) { if (durationUs != C.TIME_UNSET && pendingResetPositionUs > durationUs) {
loadingFinished = true; loadingFinished = true;
pendingResetPositionUs = C.TIME_UNSET; pendingResetPositionUs = C.TIME_UNSET;
return; return;
......
...@@ -242,7 +242,7 @@ public final class Cea608Decoder extends CeaDecoder { ...@@ -242,7 +242,7 @@ public final class Cea608Decoder extends CeaDecoder {
private int captionMode; private int captionMode;
private int captionRowCount; private int captionRowCount;
private boolean captionValid; private boolean isCaptionValid;
private boolean repeatableControlSet; private boolean repeatableControlSet;
private byte repeatableControlCc1; private byte repeatableControlCc1;
private byte repeatableControlCc2; private byte repeatableControlCc2;
...@@ -300,7 +300,7 @@ public final class Cea608Decoder extends CeaDecoder { ...@@ -300,7 +300,7 @@ public final class Cea608Decoder extends CeaDecoder {
setCaptionMode(CC_MODE_UNKNOWN); setCaptionMode(CC_MODE_UNKNOWN);
setCaptionRowCount(DEFAULT_CAPTIONS_ROW_COUNT); setCaptionRowCount(DEFAULT_CAPTIONS_ROW_COUNT);
resetCueBuilders(); resetCueBuilders();
captionValid = false; isCaptionValid = false;
repeatableControlSet = false; repeatableControlSet = false;
repeatableControlCc1 = 0; repeatableControlCc1 = 0;
repeatableControlCc2 = 0; repeatableControlCc2 = 0;
...@@ -358,13 +358,19 @@ public final class Cea608Decoder extends CeaDecoder { ...@@ -358,13 +358,19 @@ public final class Cea608Decoder extends CeaDecoder {
continue; continue;
} }
boolean repeatedControlPossible = repeatableControlSet; boolean previousIsCaptionValid = isCaptionValid;
repeatableControlSet = false; isCaptionValid =
(ccHeader & CC_VALID_FLAG) == CC_VALID_FLAG
&& ODD_PARITY_BYTE_TABLE[ccByte1]
&& ODD_PARITY_BYTE_TABLE[ccByte2];
if (isRepeatedCommand(isCaptionValid, ccData1, ccData2)) {
// Ignore repeated valid commands.
continue;
}
boolean previousCaptionValid = captionValid; if (!isCaptionValid) {
captionValid = (ccHeader & CC_VALID_FLAG) == CC_VALID_FLAG; if (previousIsCaptionValid) {
if (!captionValid) {
if (previousCaptionValid) {
// The encoder has flipped the validity bit to indicate captions are being turned off. // The encoder has flipped the validity bit to indicate captions are being turned off.
resetCueBuilders(); resetCueBuilders();
captionDataProcessed = true; captionDataProcessed = true;
...@@ -372,65 +378,41 @@ public final class Cea608Decoder extends CeaDecoder { ...@@ -372,65 +378,41 @@ public final class Cea608Decoder extends CeaDecoder {
continue; continue;
} }
// If we've reached this point then there is data to process; flag that work has been done.
captionDataProcessed = true;
if (!ODD_PARITY_BYTE_TABLE[ccByte1] || !ODD_PARITY_BYTE_TABLE[ccByte2]) {
// The data is invalid.
resetCueBuilders();
continue;
}
maybeUpdateIsInCaptionService(ccData1, ccData2); maybeUpdateIsInCaptionService(ccData1, ccData2);
if (!isInCaptionService) { if (!isInCaptionService) {
// Only the Captioning service is supported. Drop all other bytes. // Only the Captioning service is supported. Drop all other bytes.
continue; continue;
} }
// Special North American character set. if (!updateAndVerifyCurrentChannel(ccData1)) {
// ccData1 - 0|0|0|1|C|0|0|1 // Wrong channel.
// ccData2 - 0|0|1|1|X|X|X|X
if (((ccData1 & 0xF7) == 0x11) && ((ccData2 & 0xF0) == 0x30)) {
if (getChannel(ccData1) == selectedChannel) {
currentCueBuilder.append(getSpecialChar(ccData2));
}
continue; continue;
} }
// Extended Western European character set. if (isCtrlCode(ccData1)) {
// ccData1 - 0|0|0|1|C|0|1|S if (isSpecialNorthAmericanChar(ccData1, ccData2)) {
// ccData2 - 0|0|1|X|X|X|X|X currentCueBuilder.append(getSpecialNorthAmericanChar(ccData2));
if (((ccData1 & 0xF6) == 0x12) && (ccData2 & 0xE0) == 0x20) { } else if (isExtendedWestEuropeanChar(ccData1, ccData2)) {
if (getChannel(ccData1) == selectedChannel) { // Remove standard equivalent of the special extended char before appending new one.
// Remove standard equivalent of the special extended char before appending new one
currentCueBuilder.backspace(); currentCueBuilder.backspace();
if ((ccData1 & 0x01) == 0x00) { currentCueBuilder.append(getExtendedWestEuropeanChar(ccData1, ccData2));
// Extended Spanish/Miscellaneous and French character set (S = 0). } else if (isMidrowCtrlCode(ccData1, ccData2)) {
currentCueBuilder.append(getExtendedEsFrChar(ccData2)); handleMidrowCtrl(ccData2);
} else { } else if (isPreambleAddressCode(ccData1, ccData2)) {
// Extended Portuguese and German/Danish character set (S = 1). handlePreambleAddressCode(ccData1, ccData2);
currentCueBuilder.append(getExtendedPtDeChar(ccData2)); } else if (isTabCtrlCode(ccData1, ccData2)) {
} currentCueBuilder.tabOffset = ccData2 - 0x20;
} else if (isMiscCode(ccData1, ccData2)) {
handleMiscCode(ccData2);
}
} else {
// Basic North American character set.
currentCueBuilder.append(getBasicChar(ccData1));
if ((ccData2 & 0xE0) != 0x00) {
currentCueBuilder.append(getBasicChar(ccData2));
} }
continue;
}
// Control character.
// ccData1 - 0|0|0|X|X|X|X|X
if ((ccData1 & 0xE0) == 0x00) {
handleCtrl(ccData1, ccData2, repeatedControlPossible);
continue;
}
if (currentChannel != selectedChannel) {
continue;
}
// Basic North American character set.
currentCueBuilder.append(getChar(ccData1));
if ((ccData2 & 0xE0) != 0x00) {
currentCueBuilder.append(getChar(ccData2));
} }
captionDataProcessed = true;
} }
if (captionDataProcessed) { if (captionDataProcessed) {
...@@ -440,15 +422,22 @@ public final class Cea608Decoder extends CeaDecoder { ...@@ -440,15 +422,22 @@ public final class Cea608Decoder extends CeaDecoder {
} }
} }
private void handleCtrl(byte cc1, byte cc2, boolean repeatedControlPossible) { private boolean updateAndVerifyCurrentChannel(byte cc1) {
currentChannel = getChannel(cc1); if (isCtrlCode(cc1)) {
currentChannel = getChannel(cc1);
}
return currentChannel == selectedChannel;
}
private boolean isRepeatedCommand(boolean captionValid, byte cc1, byte cc2) {
// Most control commands are sent twice in succession to ensure they are received properly. We // Most control commands are sent twice in succession to ensure they are received properly. We
// don't want to process duplicate commands, so if we see the same repeatable command twice in a // don't want to process duplicate commands, so if we see the same repeatable command twice in a
// row then we ignore the second one. // row then we ignore the second one.
if (isRepeatable(cc1)) { if (captionValid && isRepeatable(cc1)) {
if (repeatedControlPossible && repeatableControlCc1 == cc1 && repeatableControlCc2 == cc2) { if (repeatableControlSet && repeatableControlCc1 == cc1 && repeatableControlCc2 == cc2) {
// This is a repeated command, so we ignore it. // This is a repeated command, so we ignore it.
return; repeatableControlSet = false;
return true;
} else { } else {
// This is the first occurrence of a repeatable command. Set the repeatable control // This is the first occurrence of a repeatable command. Set the repeatable control
// variables so that we can recognize and ignore a duplicate (if there is one), and then // variables so that we can recognize and ignore a duplicate (if there is one), and then
...@@ -457,21 +446,11 @@ public final class Cea608Decoder extends CeaDecoder { ...@@ -457,21 +446,11 @@ public final class Cea608Decoder extends CeaDecoder {
repeatableControlCc1 = cc1; repeatableControlCc1 = cc1;
repeatableControlCc2 = cc2; repeatableControlCc2 = cc2;
} }
} else {
// This command is not repeatable.
repeatableControlSet = false;
} }
return false;
if (currentChannel != selectedChannel) {
return;
}
if (isMidrowCtrlCode(cc1, cc2)) {
handleMidrowCtrl(cc2);
} else if (isPreambleAddressCode(cc1, cc2)) {
handlePreambleAddressCode(cc1, cc2);
} else if (isTabCtrlCode(cc1, cc2)) {
currentCueBuilder.tabOffset = cc2 - 0x20;
} else if (isMiscCode(cc1, cc2)) {
handleMiscCode(cc2);
}
} }
private void handleMidrowCtrl(byte cc2) { private void handleMidrowCtrl(byte cc2) {
...@@ -676,16 +655,38 @@ public final class Cea608Decoder extends CeaDecoder { ...@@ -676,16 +655,38 @@ public final class Cea608Decoder extends CeaDecoder {
} }
} }
private static char getChar(byte ccData) { private static char getBasicChar(byte ccData) {
int index = (ccData & 0x7F) - 0x20; int index = (ccData & 0x7F) - 0x20;
return (char) BASIC_CHARACTER_SET[index]; return (char) BASIC_CHARACTER_SET[index];
} }
private static char getSpecialChar(byte ccData) { private static boolean isSpecialNorthAmericanChar(byte cc1, byte cc2) {
// cc1 - 0|0|0|1|C|0|0|1
// cc2 - 0|0|1|1|X|X|X|X
return ((cc1 & 0xF7) == 0x11) && ((cc2 & 0xF0) == 0x30);
}
private static char getSpecialNorthAmericanChar(byte ccData) {
int index = ccData & 0x0F; int index = ccData & 0x0F;
return (char) SPECIAL_CHARACTER_SET[index]; return (char) SPECIAL_CHARACTER_SET[index];
} }
private static boolean isExtendedWestEuropeanChar(byte cc1, byte cc2) {
// cc1 - 0|0|0|1|C|0|1|S
// cc2 - 0|0|1|X|X|X|X|X
return ((cc1 & 0xF6) == 0x12) && ((cc2 & 0xE0) == 0x20);
}
private static char getExtendedWestEuropeanChar(byte cc1, byte cc2) {
if ((cc1 & 0x01) == 0x00) {
// Extended Spanish/Miscellaneous and French character set (S = 0).
return getExtendedEsFrChar(cc2);
} else {
// Extended Portuguese and German/Danish character set (S = 1).
return getExtendedPtDeChar(cc2);
}
}
private static char getExtendedEsFrChar(byte ccData) { private static char getExtendedEsFrChar(byte ccData) {
int index = ccData & 0x1F; int index = ccData & 0x1F;
return (char) SPECIAL_ES_FR_CHARACTER_SET[index]; return (char) SPECIAL_ES_FR_CHARACTER_SET[index];
...@@ -696,6 +697,11 @@ public final class Cea608Decoder extends CeaDecoder { ...@@ -696,6 +697,11 @@ public final class Cea608Decoder extends CeaDecoder {
return (char) SPECIAL_PT_DE_CHARACTER_SET[index]; return (char) SPECIAL_PT_DE_CHARACTER_SET[index];
} }
private static boolean isCtrlCode(byte cc1) {
// cc1 - 0|0|0|X|X|X|X|X
return (cc1 & 0xE0) == 0x00;
}
private static int getChannel(byte cc1) { private static int getChannel(byte cc1) {
// cc1 - X|X|X|X|C|X|X|X // cc1 - X|X|X|X|C|X|X|X
return (cc1 >> 3) & 0x1; return (cc1 >> 3) & 0x1;
......
...@@ -49,7 +49,6 @@ public final class CacheDataSink implements DataSink { ...@@ -49,7 +49,6 @@ public final class CacheDataSink implements DataSink {
private final long fragmentSize; private final long fragmentSize;
private final int bufferSize; private final int bufferSize;
private boolean syncFileDescriptor;
private DataSpec dataSpec; private DataSpec dataSpec;
private long dataSpecFragmentSize; private long dataSpecFragmentSize;
private File file; private File file;
...@@ -108,18 +107,6 @@ public final class CacheDataSink implements DataSink { ...@@ -108,18 +107,6 @@ public final class CacheDataSink implements DataSink {
this.cache = Assertions.checkNotNull(cache); this.cache = Assertions.checkNotNull(cache);
this.fragmentSize = fragmentSize == C.LENGTH_UNSET ? Long.MAX_VALUE : fragmentSize; this.fragmentSize = fragmentSize == C.LENGTH_UNSET ? Long.MAX_VALUE : fragmentSize;
this.bufferSize = bufferSize; this.bufferSize = bufferSize;
syncFileDescriptor = true;
}
/**
* Sets whether file descriptors are synced when closing output streams.
*
* <p>This method is experimental, and will be renamed or removed in a future release.
*
* @param syncFileDescriptor Whether file descriptors are synced when closing output streams.
*/
public void experimental_setSyncFileDescriptor(boolean syncFileDescriptor) {
this.syncFileDescriptor = syncFileDescriptor;
} }
@Override @Override
...@@ -208,9 +195,6 @@ public final class CacheDataSink implements DataSink { ...@@ -208,9 +195,6 @@ public final class CacheDataSink implements DataSink {
boolean success = false; boolean success = false;
try { try {
outputStream.flush(); outputStream.flush();
if (syncFileDescriptor) {
underlyingFileOutputStream.getFD().sync();
}
success = true; success = true;
} finally { } finally {
Util.closeQuietly(outputStream); Util.closeQuietly(outputStream);
......
...@@ -26,8 +26,6 @@ public final class CacheDataSinkFactory implements DataSink.Factory { ...@@ -26,8 +26,6 @@ public final class CacheDataSinkFactory implements DataSink.Factory {
private final long fragmentSize; private final long fragmentSize;
private final int bufferSize; private final int bufferSize;
private boolean syncFileDescriptor;
/** @see CacheDataSink#CacheDataSink(Cache, long) */ /** @see CacheDataSink#CacheDataSink(Cache, long) */
public CacheDataSinkFactory(Cache cache, long fragmentSize) { public CacheDataSinkFactory(Cache cache, long fragmentSize) {
this(cache, fragmentSize, CacheDataSink.DEFAULT_BUFFER_SIZE); this(cache, fragmentSize, CacheDataSink.DEFAULT_BUFFER_SIZE);
...@@ -40,20 +38,8 @@ public final class CacheDataSinkFactory implements DataSink.Factory { ...@@ -40,20 +38,8 @@ public final class CacheDataSinkFactory implements DataSink.Factory {
this.bufferSize = bufferSize; this.bufferSize = bufferSize;
} }
/**
* See {@link CacheDataSink#experimental_setSyncFileDescriptor(boolean)}.
*
* <p>This method is experimental, and will be renamed or removed in a future release.
*/
public CacheDataSinkFactory experimental_setSyncFileDescriptor(boolean syncFileDescriptor) {
this.syncFileDescriptor = syncFileDescriptor;
return this;
}
@Override @Override
public DataSink createDataSink() { public DataSink createDataSink() {
CacheDataSink dataSink = new CacheDataSink(cache, fragmentSize, bufferSize); return new CacheDataSink(cache, fragmentSize, bufferSize);
dataSink.experimental_setSyncFileDescriptor(syncFileDescriptor);
return dataSink;
} }
} }
...@@ -679,7 +679,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -679,7 +679,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
int bufferIndex, int bufferIndex,
int bufferFlags, int bufferFlags,
long bufferPresentationTimeUs, long bufferPresentationTimeUs,
boolean shouldSkip, boolean isDecodeOnlyBuffer,
boolean isLastBuffer,
Format format) Format format)
throws ExoPlaybackException { throws ExoPlaybackException {
if (initialPositionUs == C.TIME_UNSET) { if (initialPositionUs == C.TIME_UNSET) {
...@@ -688,7 +689,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -688,7 +689,7 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
long presentationTimeUs = bufferPresentationTimeUs - outputStreamOffsetUs; long presentationTimeUs = bufferPresentationTimeUs - outputStreamOffsetUs;
if (shouldSkip) { if (isDecodeOnlyBuffer && !isLastBuffer) {
skipOutputBuffer(codec, bufferIndex, presentationTimeUs); skipOutputBuffer(codec, bufferIndex, presentationTimeUs);
return true; return true;
} }
...@@ -736,10 +737,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -736,10 +737,10 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
bufferPresentationTimeUs, unadjustedFrameReleaseTimeNs); bufferPresentationTimeUs, unadjustedFrameReleaseTimeNs);
earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000; earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;
if (shouldDropBuffersToKeyframe(earlyUs, elapsedRealtimeUs) if (shouldDropBuffersToKeyframe(earlyUs, elapsedRealtimeUs, isLastBuffer)
&& maybeDropBuffersToKeyframe(codec, bufferIndex, presentationTimeUs, positionUs)) { && maybeDropBuffersToKeyframe(codec, bufferIndex, presentationTimeUs, positionUs)) {
return false; return false;
} else if (shouldDropOutputBuffer(earlyUs, elapsedRealtimeUs)) { } else if (shouldDropOutputBuffer(earlyUs, elapsedRealtimeUs, isLastBuffer)) {
dropOutputBuffer(codec, bufferIndex, presentationTimeUs); dropOutputBuffer(codec, bufferIndex, presentationTimeUs);
return true; return true;
} }
...@@ -807,8 +808,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -807,8 +808,8 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
/** /**
* Returns the offset that should be subtracted from {@code bufferPresentationTimeUs} in {@link * Returns the offset that should be subtracted from {@code bufferPresentationTimeUs} in {@link
* #processOutputBuffer(long, long, MediaCodec, ByteBuffer, int, int, long, boolean, Format)} to * #processOutputBuffer(long, long, MediaCodec, ByteBuffer, int, int, long, boolean, boolean,
* get the playback position with respect to the media. * Format)} to get the playback position with respect to the media.
*/ */
protected long getOutputStreamOffsetUs() { protected long getOutputStreamOffsetUs() {
return outputStreamOffsetUs; return outputStreamOffsetUs;
...@@ -860,9 +861,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -860,9 +861,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
* indicates that the buffer is late. * indicates that the buffer is late.
* @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds, * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds,
* measured at the start of the current iteration of the rendering loop. * measured at the start of the current iteration of the rendering loop.
* @param isLastBuffer Whether the buffer is the last buffer in the current stream.
*/ */
protected boolean shouldDropOutputBuffer(long earlyUs, long elapsedRealtimeUs) { protected boolean shouldDropOutputBuffer(
return isBufferLate(earlyUs); long earlyUs, long elapsedRealtimeUs, boolean isLastBuffer) {
return isBufferLate(earlyUs) && !isLastBuffer;
} }
/** /**
...@@ -873,9 +876,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer { ...@@ -873,9 +876,11 @@ public class MediaCodecVideoRenderer extends MediaCodecRenderer {
* negative value indicates that the buffer is late. * negative value indicates that the buffer is late.
* @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds, * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds,
* measured at the start of the current iteration of the rendering loop. * measured at the start of the current iteration of the rendering loop.
* @param isLastBuffer Whether the buffer is the last buffer in the current stream.
*/ */
protected boolean shouldDropBuffersToKeyframe(long earlyUs, long elapsedRealtimeUs) { protected boolean shouldDropBuffersToKeyframe(
return isBufferVeryLate(earlyUs); long earlyUs, long elapsedRealtimeUs, boolean isLastBuffer) {
return isBufferVeryLate(earlyUs) && !isLastBuffer;
} }
/** /**
......
...@@ -42,6 +42,7 @@ import java.io.ByteArrayOutputStream; ...@@ -42,6 +42,7 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import java.util.regex.Matcher; import java.util.regex.Matcher;
...@@ -242,7 +243,7 @@ public class DashManifestParser extends DefaultHandler ...@@ -242,7 +243,7 @@ public class DashManifestParser extends DefaultHandler
} else if (XmlPullParserUtil.isStartTag(xpp, "SegmentList")) { } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentList")) {
segmentBase = parseSegmentList(xpp, null); segmentBase = parseSegmentList(xpp, null);
} else if (XmlPullParserUtil.isStartTag(xpp, "SegmentTemplate")) { } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentTemplate")) {
segmentBase = parseSegmentTemplate(xpp, null); segmentBase = parseSegmentTemplate(xpp, null, Collections.emptyList());
} else { } else {
maybeSkipTag(xpp); maybeSkipTag(xpp);
} }
...@@ -323,6 +324,7 @@ public class DashManifestParser extends DefaultHandler ...@@ -323,6 +324,7 @@ public class DashManifestParser extends DefaultHandler
language, language,
roleDescriptors, roleDescriptors,
accessibilityDescriptors, accessibilityDescriptors,
supplementalProperties,
segmentBase); segmentBase);
contentType = checkContentTypeConsistency(contentType, contentType = checkContentTypeConsistency(contentType,
getContentType(representationInfo.format)); getContentType(representationInfo.format));
...@@ -332,7 +334,8 @@ public class DashManifestParser extends DefaultHandler ...@@ -332,7 +334,8 @@ public class DashManifestParser extends DefaultHandler
} else if (XmlPullParserUtil.isStartTag(xpp, "SegmentList")) { } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentList")) {
segmentBase = parseSegmentList(xpp, (SegmentList) segmentBase); segmentBase = parseSegmentList(xpp, (SegmentList) segmentBase);
} else if (XmlPullParserUtil.isStartTag(xpp, "SegmentTemplate")) { } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentTemplate")) {
segmentBase = parseSegmentTemplate(xpp, (SegmentTemplate) segmentBase); segmentBase =
parseSegmentTemplate(xpp, (SegmentTemplate) segmentBase, supplementalProperties);
} else if (XmlPullParserUtil.isStartTag(xpp, "InbandEventStream")) { } else if (XmlPullParserUtil.isStartTag(xpp, "InbandEventStream")) {
inbandEventStreams.add(parseDescriptor(xpp, "InbandEventStream")); inbandEventStreams.add(parseDescriptor(xpp, "InbandEventStream"));
} else if (XmlPullParserUtil.isStartTag(xpp)) { } else if (XmlPullParserUtil.isStartTag(xpp)) {
...@@ -492,6 +495,7 @@ public class DashManifestParser extends DefaultHandler ...@@ -492,6 +495,7 @@ public class DashManifestParser extends DefaultHandler
String adaptationSetLanguage, String adaptationSetLanguage,
List<Descriptor> adaptationSetRoleDescriptors, List<Descriptor> adaptationSetRoleDescriptors,
List<Descriptor> adaptationSetAccessibilityDescriptors, List<Descriptor> adaptationSetAccessibilityDescriptors,
List<Descriptor> adaptationSetSupplementalProperties,
SegmentBase segmentBase) SegmentBase segmentBase)
throws XmlPullParserException, IOException { throws XmlPullParserException, IOException {
String id = xpp.getAttributeValue(null, "id"); String id = xpp.getAttributeValue(null, "id");
...@@ -524,7 +528,9 @@ public class DashManifestParser extends DefaultHandler ...@@ -524,7 +528,9 @@ public class DashManifestParser extends DefaultHandler
} else if (XmlPullParserUtil.isStartTag(xpp, "SegmentList")) { } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentList")) {
segmentBase = parseSegmentList(xpp, (SegmentList) segmentBase); segmentBase = parseSegmentList(xpp, (SegmentList) segmentBase);
} else if (XmlPullParserUtil.isStartTag(xpp, "SegmentTemplate")) { } else if (XmlPullParserUtil.isStartTag(xpp, "SegmentTemplate")) {
segmentBase = parseSegmentTemplate(xpp, (SegmentTemplate) segmentBase); segmentBase =
parseSegmentTemplate(
xpp, (SegmentTemplate) segmentBase, adaptationSetSupplementalProperties);
} else if (XmlPullParserUtil.isStartTag(xpp, "ContentProtection")) { } else if (XmlPullParserUtil.isStartTag(xpp, "ContentProtection")) {
Pair<String, SchemeData> contentProtection = parseContentProtection(xpp); Pair<String, SchemeData> contentProtection = parseContentProtection(xpp);
if (contentProtection.first != null) { if (contentProtection.first != null) {
...@@ -763,13 +769,19 @@ public class DashManifestParser extends DefaultHandler ...@@ -763,13 +769,19 @@ public class DashManifestParser extends DefaultHandler
startNumber, duration, timeline, segments); startNumber, duration, timeline, segments);
} }
protected SegmentTemplate parseSegmentTemplate(XmlPullParser xpp, SegmentTemplate parent) protected SegmentTemplate parseSegmentTemplate(
XmlPullParser xpp,
SegmentTemplate parent,
List<Descriptor> adaptationSetSupplementalProperties)
throws XmlPullParserException, IOException { throws XmlPullParserException, IOException {
long timescale = parseLong(xpp, "timescale", parent != null ? parent.timescale : 1); long timescale = parseLong(xpp, "timescale", parent != null ? parent.timescale : 1);
long presentationTimeOffset = parseLong(xpp, "presentationTimeOffset", long presentationTimeOffset = parseLong(xpp, "presentationTimeOffset",
parent != null ? parent.presentationTimeOffset : 0); parent != null ? parent.presentationTimeOffset : 0);
long duration = parseLong(xpp, "duration", parent != null ? parent.duration : C.TIME_UNSET); long duration = parseLong(xpp, "duration", parent != null ? parent.duration : C.TIME_UNSET);
long startNumber = parseLong(xpp, "startNumber", parent != null ? parent.startNumber : 1); long startNumber = parseLong(xpp, "startNumber", parent != null ? parent.startNumber : 1);
long endNumber =
parseLastSegmentNumberSupplementalProperty(adaptationSetSupplementalProperties);
UrlTemplate mediaTemplate = parseUrlTemplate(xpp, "media", UrlTemplate mediaTemplate = parseUrlTemplate(xpp, "media",
parent != null ? parent.mediaTemplate : null); parent != null ? parent.mediaTemplate : null);
UrlTemplate initializationTemplate = parseUrlTemplate(xpp, "initialization", UrlTemplate initializationTemplate = parseUrlTemplate(xpp, "initialization",
...@@ -794,8 +806,16 @@ public class DashManifestParser extends DefaultHandler ...@@ -794,8 +806,16 @@ public class DashManifestParser extends DefaultHandler
timeline = timeline != null ? timeline : parent.segmentTimeline; timeline = timeline != null ? timeline : parent.segmentTimeline;
} }
return buildSegmentTemplate(initialization, timescale, presentationTimeOffset, return buildSegmentTemplate(
startNumber, duration, timeline, initializationTemplate, mediaTemplate); initialization,
timescale,
presentationTimeOffset,
startNumber,
endNumber,
duration,
timeline,
initializationTemplate,
mediaTemplate);
} }
protected SegmentTemplate buildSegmentTemplate( protected SegmentTemplate buildSegmentTemplate(
...@@ -803,12 +823,21 @@ public class DashManifestParser extends DefaultHandler ...@@ -803,12 +823,21 @@ public class DashManifestParser extends DefaultHandler
long timescale, long timescale,
long presentationTimeOffset, long presentationTimeOffset,
long startNumber, long startNumber,
long endNumber,
long duration, long duration,
List<SegmentTimelineElement> timeline, List<SegmentTimelineElement> timeline,
UrlTemplate initializationTemplate, UrlTemplate initializationTemplate,
UrlTemplate mediaTemplate) { UrlTemplate mediaTemplate) {
return new SegmentTemplate(initialization, timescale, presentationTimeOffset, return new SegmentTemplate(
startNumber, duration, timeline, initializationTemplate, mediaTemplate); initialization,
timescale,
presentationTimeOffset,
startNumber,
endNumber,
duration,
timeline,
initializationTemplate,
mediaTemplate);
} }
/** /**
...@@ -1445,6 +1474,18 @@ public class DashManifestParser extends DefaultHandler ...@@ -1445,6 +1474,18 @@ public class DashManifestParser extends DefaultHandler
} }
} }
/**
 * Returns the number of the last segment, as signalled by a SupplementalProperty descriptor whose
 * schemeIdUri is "http://dashif.org/guidelines/last-segment-number", or {@link C#INDEX_UNSET} if
 * no such descriptor is present.
 *
 * @param supplementalProperties The SupplementalProperty descriptors to search.
 * @return The last segment number, or {@link C#INDEX_UNSET}.
 */
protected static long parseLastSegmentNumberSupplementalProperty(
    List<Descriptor> supplementalProperties) {
  for (Descriptor property : supplementalProperties) {
    boolean isLastSegmentNumberScheme =
        "http://dashif.org/guidelines/last-segment-number".equalsIgnoreCase(property.schemeIdUri);
    if (isLastSegmentNumberScheme) {
      return Long.parseLong(property.value);
    }
  }
  return C.INDEX_UNSET;
}
/** A parsed Representation element. */ /** A parsed Representation element. */
protected static final class RepresentationInfo { protected static final class RepresentationInfo {
......
...@@ -277,6 +277,7 @@ public abstract class SegmentBase { ...@@ -277,6 +277,7 @@ public abstract class SegmentBase {
/* package */ final UrlTemplate initializationTemplate; /* package */ final UrlTemplate initializationTemplate;
/* package */ final UrlTemplate mediaTemplate; /* package */ final UrlTemplate mediaTemplate;
/* package */ final long endNumber;
/** /**
* @param initialization A {@link RangedUri} corresponding to initialization data, if such data * @param initialization A {@link RangedUri} corresponding to initialization data, if such data
...@@ -286,6 +287,9 @@ public abstract class SegmentBase { ...@@ -286,6 +287,9 @@ public abstract class SegmentBase {
* @param presentationTimeOffset The presentation time offset. The value in seconds is the * @param presentationTimeOffset The presentation time offset. The value in seconds is the
* division of this value and {@code timescale}. * division of this value and {@code timescale}.
* @param startNumber The sequence number of the first segment. * @param startNumber The sequence number of the first segment.
* @param endNumber The sequence number of the last segment as specified by the
* SupplementalProperty with schemeIdUri="http://dashif.org/guidelines/last-segment-number",
* or {@link C#INDEX_UNSET}.
* @param duration The duration of each segment in the case of fixed duration segments. The * @param duration The duration of each segment in the case of fixed duration segments. The
* value in seconds is the division of this value and {@code timescale}. If {@code * value in seconds is the division of this value and {@code timescale}. If {@code
* segmentTimeline} is non-null then this parameter is ignored. * segmentTimeline} is non-null then this parameter is ignored.
...@@ -302,14 +306,21 @@ public abstract class SegmentBase { ...@@ -302,14 +306,21 @@ public abstract class SegmentBase {
long timescale, long timescale,
long presentationTimeOffset, long presentationTimeOffset,
long startNumber, long startNumber,
long endNumber,
long duration, long duration,
List<SegmentTimelineElement> segmentTimeline, List<SegmentTimelineElement> segmentTimeline,
UrlTemplate initializationTemplate, UrlTemplate initializationTemplate,
UrlTemplate mediaTemplate) { UrlTemplate mediaTemplate) {
super(initialization, timescale, presentationTimeOffset, startNumber, super(
duration, segmentTimeline); initialization,
timescale,
presentationTimeOffset,
startNumber,
duration,
segmentTimeline);
this.initializationTemplate = initializationTemplate; this.initializationTemplate = initializationTemplate;
this.mediaTemplate = mediaTemplate; this.mediaTemplate = mediaTemplate;
this.endNumber = endNumber;
} }
@Override @Override
...@@ -340,6 +351,8 @@ public abstract class SegmentBase { ...@@ -340,6 +351,8 @@ public abstract class SegmentBase {
public int getSegmentCount(long periodDurationUs) { public int getSegmentCount(long periodDurationUs) {
if (segmentTimeline != null) { if (segmentTimeline != null) {
return segmentTimeline.size(); return segmentTimeline.size();
} else if (endNumber != C.INDEX_UNSET) {
return (int) (endNumber - startNumber + 1);
} else if (periodDurationUs != C.TIME_UNSET) { } else if (periodDurationUs != C.TIME_UNSET) {
long durationUs = (duration * C.MICROS_PER_SECOND) / timescale; long durationUs = (duration * C.MICROS_PER_SECOND) / timescale;
return (int) Util.ceilDivide(periodDurationUs, durationUs); return (int) Util.ceilDivide(periodDurationUs, durationUs);
...@@ -347,7 +360,6 @@ public abstract class SegmentBase { ...@@ -347,7 +360,6 @@ public abstract class SegmentBase {
return DashSegmentIndex.INDEX_UNBOUNDED; return DashSegmentIndex.INDEX_UNBOUNDED;
} }
} }
} }
/** /**
......
...@@ -586,6 +586,7 @@ public class SsManifestParser implements ParsingLoadable.Parser<SsManifest> { ...@@ -586,6 +586,7 @@ public class SsManifestParser implements ParsingLoadable.Parser<SsManifest> {
} else { } else {
subType = parser.getAttributeValue(null, KEY_SUB_TYPE); subType = parser.getAttributeValue(null, KEY_SUB_TYPE);
} }
putNormalizedAttribute(KEY_SUB_TYPE, subType);
name = parser.getAttributeValue(null, KEY_NAME); name = parser.getAttributeValue(null, KEY_NAME);
url = parseRequiredString(parser, KEY_URL); url = parseRequiredString(parser, KEY_URL);
maxWidth = parseInt(parser, KEY_MAX_WIDTH, Format.NO_VALUE); maxWidth = parseInt(parser, KEY_MAX_WIDTH, Format.NO_VALUE);
...@@ -645,6 +646,7 @@ public class SsManifestParser implements ParsingLoadable.Parser<SsManifest> { ...@@ -645,6 +646,7 @@ public class SsManifestParser implements ParsingLoadable.Parser<SsManifest> {
private static final String KEY_CHANNELS = "Channels"; private static final String KEY_CHANNELS = "Channels";
private static final String KEY_FOUR_CC = "FourCC"; private static final String KEY_FOUR_CC = "FourCC";
private static final String KEY_TYPE = "Type"; private static final String KEY_TYPE = "Type";
private static final String KEY_SUB_TYPE = "Subtype";
private static final String KEY_LANGUAGE = "Language"; private static final String KEY_LANGUAGE = "Language";
private static final String KEY_NAME = "Name"; private static final String KEY_NAME = "Name";
private static final String KEY_MAX_WIDTH = "MaxWidth"; private static final String KEY_MAX_WIDTH = "MaxWidth";
...@@ -709,6 +711,18 @@ public class SsManifestParser implements ParsingLoadable.Parser<SsManifest> { ...@@ -709,6 +711,18 @@ public class SsManifestParser implements ParsingLoadable.Parser<SsManifest> {
/* roleFlags= */ 0, /* roleFlags= */ 0,
language); language);
} else if (type == C.TRACK_TYPE_TEXT) { } else if (type == C.TRACK_TYPE_TEXT) {
String subType = (String) getNormalizedAttribute(KEY_SUB_TYPE);
@C.RoleFlags int roleFlags = 0;
switch (subType) {
case "CAPT":
roleFlags = C.ROLE_FLAG_CAPTION;
break;
case "DESC":
roleFlags = C.ROLE_FLAG_DESCRIBES_MUSIC_AND_SOUND;
break;
default:
break;
}
String language = (String) getNormalizedAttribute(KEY_LANGUAGE); String language = (String) getNormalizedAttribute(KEY_LANGUAGE);
format = format =
Format.createTextContainerFormat( Format.createTextContainerFormat(
...@@ -719,7 +733,7 @@ public class SsManifestParser implements ParsingLoadable.Parser<SsManifest> { ...@@ -719,7 +733,7 @@ public class SsManifestParser implements ParsingLoadable.Parser<SsManifest> {
/* codecs= */ null, /* codecs= */ null,
bitrate, bitrate,
/* selectionFlags= */ 0, /* selectionFlags= */ 0,
/* roleFlags= */ 0, roleFlags,
language); language);
} else { } else {
format = format =
......
...@@ -40,7 +40,7 @@ android { ...@@ -40,7 +40,7 @@ android {
dependencies { dependencies {
implementation project(modulePrefix + 'library-core') implementation project(modulePrefix + 'library-core')
implementation 'androidx.media:media:1.0.0' implementation 'androidx.media:media:1.0.1'
implementation 'androidx.annotation:annotation:1.0.2' implementation 'androidx.annotation:annotation:1.0.2'
compileOnly 'org.checkerframework:checker-qual:' + checkerframeworkVersion compileOnly 'org.checkerframework:checker-qual:' + checkerframeworkVersion
testImplementation project(modulePrefix + 'testutils-robolectric') testImplementation project(modulePrefix + 'testutils-robolectric')
......
...@@ -1050,6 +1050,9 @@ public class PlayerView extends FrameLayout implements AdsLoader.AdViewProvider ...@@ -1050,6 +1050,9 @@ public class PlayerView extends FrameLayout implements AdsLoader.AdViewProvider
@Override @Override
public boolean onTouchEvent(MotionEvent event) { public boolean onTouchEvent(MotionEvent event) {
if (!useController || player == null) {
return false;
}
switch (event.getAction()) { switch (event.getAction()) {
case MotionEvent.ACTION_DOWN: case MotionEvent.ACTION_DOWN:
isTouching = true; isTouching = true;
...@@ -1150,9 +1153,6 @@ public class PlayerView extends FrameLayout implements AdsLoader.AdViewProvider ...@@ -1150,9 +1153,6 @@ public class PlayerView extends FrameLayout implements AdsLoader.AdViewProvider
// Internal methods. // Internal methods.
private boolean toggleControllerVisibility() { private boolean toggleControllerVisibility() {
if (!useController || player == null) {
return false;
}
if (!controller.isVisible()) { if (!controller.isVisible()) {
maybeShowController(true); maybeShowController(true);
} else if (controllerHideOnTouch) { } else if (controllerHideOnTouch) {
...@@ -1472,6 +1472,9 @@ public class PlayerView extends FrameLayout implements AdsLoader.AdViewProvider ...@@ -1472,6 +1472,9 @@ public class PlayerView extends FrameLayout implements AdsLoader.AdViewProvider
@Override @Override
public boolean onSingleTapUp(MotionEvent e) { public boolean onSingleTapUp(MotionEvent e) {
if (!useController || player == null) {
return false;
}
return toggleControllerVisibility(); return toggleControllerVisibility();
} }
} }
......
...@@ -23,6 +23,21 @@ if (project.ext.has("exoplayerPublishEnabled") ...@@ -23,6 +23,21 @@ if (project.ext.has("exoplayerPublishEnabled")
groupId = 'com.google.android.exoplayer' groupId = 'com.google.android.exoplayer'
website = 'https://github.com/google/ExoPlayer' website = 'https://github.com/google/ExoPlayer'
} }
// After the task graph is resolved, hook every Maven POM generation task
// (tasks whose name contains "generatePomFileFor") so that each generated
// POM has a <licenses> section appended once the task finishes.
gradle.taskGraph.whenReady { taskGraph ->
  project.tasks
      .findAll { task -> task.name.contains("generatePomFileFor") }
      .forEach { task ->
        task.doLast {
          // Only rewrite the generated POM outputs: files named pom-*.xml
          // located under a "publications" directory. Other task outputs
          // are left untouched.
          task.outputs.files
              .filter { File file ->
                file.path.contains("publications") \
                    && file.name.matches("^pom-.+\\.xml\$")
              }
              .forEach { File file -> addLicense(file) }
        }
      }
}
} }
def getBintrayRepo() { def getBintrayRepo() {
...@@ -30,3 +45,24 @@ def getBintrayRepo() { ...@@ -30,3 +45,24 @@ def getBintrayRepo() {
property('publicRepo').toBoolean() property('publicRepo').toBoolean()
return publicRepo ? 'exoplayer' : 'exoplayer-test' return publicRepo ? 'exoplayer' : 'exoplayer-test'
} }
/**
 * Appends an Apache License 2.0 <licenses> element to the given POM file and rewrites the file
 * in place.
 *
 * The file is re-serialized through an explicit UTF-8 writer so that the bytes on disk match the
 * XML declaration that is written; the original implementation used FileWriter, which writes in
 * the platform default charset and could contradict the declared encoding. The writer is also
 * closed even if serialization throws.
 *
 * @param pom The POM file to modify.
 */
static void addLicense(File pom) {
  // Build <licenses><license>name/url/distribution</license></licenses>.
  def licenseNode = new Node(null, "license")
  licenseNode.append(
      new Node(null, "name", "The Apache Software License, Version 2.0"))
  licenseNode.append(
      new Node(null, "url", "http://www.apache.org/licenses/LICENSE-2.0.txt"))
  licenseNode.append(new Node(null, "distribution", "repo"))
  def licensesNode = new Node(null, "licenses")
  licensesNode.append(licenseNode)

  // Parse the existing POM and append the licenses element to its root.
  def xml = new XmlParser().parse(pom)
  xml.append(licensesNode)

  // Rewrite the file with UTF-8 encoding; withWriter closes the stream on all paths.
  pom.withWriter('UTF-8') { writer ->
    writer.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n")
    def printer = new XmlNodePrinter(new PrintWriter(writer))
    printer.preserveWhitespace = true
    printer.print(xml)
    writer.flush()
  }
}
...@@ -163,14 +163,15 @@ public class DebugRenderersFactory extends DefaultRenderersFactory { ...@@ -163,14 +163,15 @@ public class DebugRenderersFactory extends DefaultRenderersFactory {
int bufferIndex, int bufferIndex,
int bufferFlags, int bufferFlags,
long bufferPresentationTimeUs, long bufferPresentationTimeUs,
boolean shouldSkip, boolean isDecodeOnlyBuffer,
boolean isLastBuffer,
Format format) Format format)
throws ExoPlaybackException { throws ExoPlaybackException {
if (skipToPositionBeforeRenderingFirstFrame && bufferPresentationTimeUs < positionUs) { if (skipToPositionBeforeRenderingFirstFrame && bufferPresentationTimeUs < positionUs) {
// After the codec has been initialized, don't render the first frame until we've caught up // After the codec has been initialized, don't render the first frame until we've caught up
// to the playback position. Else test runs on devices that do not support dummy surface // to the playback position. Else test runs on devices that do not support dummy surface
// will drop frames between rendering the first one and catching up [Internal: b/66494991]. // will drop frames between rendering the first one and catching up [Internal: b/66494991].
shouldSkip = true; isDecodeOnlyBuffer = true;
} }
return super.processOutputBuffer( return super.processOutputBuffer(
positionUs, positionUs,
...@@ -180,7 +181,8 @@ public class DebugRenderersFactory extends DefaultRenderersFactory { ...@@ -180,7 +181,8 @@ public class DebugRenderersFactory extends DefaultRenderersFactory {
bufferIndex, bufferIndex,
bufferFlags, bufferFlags,
bufferPresentationTimeUs, bufferPresentationTimeUs,
shouldSkip, isDecodeOnlyBuffer,
isLastBuffer,
format); format);
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment