Skip to content
Toggle navigation
P
Projects
G
Groups
S
Snippets
Help
SDK
/
exoplayer
This project
Loading...
Sign in
Toggle navigation
Go to a project
Project
Repository
Issues
0
Merge Requests
0
Pipelines
Wiki
Snippets
Settings
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Commit
87d0be25
authored
Nov 19, 2014
by
ojw28
Browse files
Options
_('Browse Files')
Download
Plain Diff
Merge pull request #149 from google/dev
dev -> dev-hls
parents
3cfe894b
bc303b73
Show whitespace changes
Inline
Side-by-side
Showing
17 changed files
with
1393 additions
and
651 deletions
demo/src/main/java/com/google/android/exoplayer/demo/full/EventLogger.java
demo/src/main/java/com/google/android/exoplayer/demo/full/player/DemoPlayer.java
library/src/main/java/com/google/android/exoplayer/DecoderInfo.java
library/src/main/java/com/google/android/exoplayer/ExoPlayerImplInternal.java
library/src/main/java/com/google/android/exoplayer/MediaCodecAudioTrackRenderer.java
library/src/main/java/com/google/android/exoplayer/MediaCodecTrackRenderer.java
library/src/main/java/com/google/android/exoplayer/MediaCodecUtil.java
library/src/main/java/com/google/android/exoplayer/audio/AudioCapabilities.java
library/src/main/java/com/google/android/exoplayer/audio/AudioCapabilitiesReceiver.java
library/src/main/java/com/google/android/exoplayer/audio/AudioTrack.java
library/src/main/java/com/google/android/exoplayer/dash/DashChunkSource.java
library/src/main/java/com/google/android/exoplayer/parser/webm/DefaultEbmlReader.java
library/src/main/java/com/google/android/exoplayer/parser/webm/EbmlEventHandler.java
library/src/main/java/com/google/android/exoplayer/parser/webm/EbmlReader.java
library/src/main/java/com/google/android/exoplayer/parser/webm/WebmExtractor.java
library/src/main/java/com/google/android/exoplayer/util/MimeTypes.java
library/src/main/java/com/google/android/exoplayer/util/PriorityHandlerThread.java
demo/src/main/java/com/google/android/exoplayer/demo/full/EventLogger.java
View file @
87d0be25
...
...
@@ -16,8 +16,8 @@
package
com
.
google
.
android
.
exoplayer
.
demo
.
full
;
import
com.google.android.exoplayer.ExoPlayer
;
import
com.google.android.exoplayer.MediaCodecAudioTrackRenderer.AudioTrackInitializationException
;
import
com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException
;
import
com.google.android.exoplayer.audio.AudioTrack
;
import
com.google.android.exoplayer.demo.full.player.DemoPlayer
;
import
com.google.android.exoplayer.util.VerboseLogUtil
;
...
...
@@ -149,7 +149,7 @@ public class EventLogger implements DemoPlayer.Listener, DemoPlayer.InfoListener
}
@Override
public
void
onAudioTrackInitializationError
(
AudioTrackInitializationException
e
)
{
public
void
onAudioTrackInitializationError
(
AudioTrack
.
InitializationException
e
)
{
printInternalError
(
"audioTrackInitializationError"
,
e
);
}
...
...
demo/src/main/java/com/google/android/exoplayer/demo/full/player/DemoPlayer.java
View file @
87d0be25
...
...
@@ -19,10 +19,10 @@ import com.google.android.exoplayer.DummyTrackRenderer;
import
com.google.android.exoplayer.ExoPlaybackException
;
import
com.google.android.exoplayer.ExoPlayer
;
import
com.google.android.exoplayer.MediaCodecAudioTrackRenderer
;
import
com.google.android.exoplayer.MediaCodecAudioTrackRenderer.AudioTrackInitializationException
;
import
com.google.android.exoplayer.MediaCodecTrackRenderer.DecoderInitializationException
;
import
com.google.android.exoplayer.MediaCodecVideoTrackRenderer
;
import
com.google.android.exoplayer.TrackRenderer
;
import
com.google.android.exoplayer.audio.AudioTrack
;
import
com.google.android.exoplayer.chunk.ChunkSampleSource
;
import
com.google.android.exoplayer.chunk.MultiTrackChunkSource
;
import
com.google.android.exoplayer.drm.StreamingDrmSessionManager
;
...
...
@@ -110,7 +110,7 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
*/
public
interface
InternalErrorListener
{
void
onRendererInitializationError
(
Exception
e
);
void
onAudioTrackInitializationError
(
AudioTrackInitializationException
e
);
void
onAudioTrackInitializationError
(
AudioTrack
.
InitializationException
e
);
void
onDecoderInitializationError
(
DecoderInitializationException
e
);
void
onCryptoError
(
CryptoException
e
);
void
onUpstreamError
(
int
sourceId
,
IOException
e
);
...
...
@@ -454,7 +454,7 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
}
@Override
public
void
onAudioTrackInitializationError
(
AudioTrackInitializationException
e
)
{
public
void
onAudioTrackInitializationError
(
AudioTrack
.
InitializationException
e
)
{
if
(
internalErrorListener
!=
null
)
{
internalErrorListener
.
onAudioTrackInitializationError
(
e
);
}
...
...
library/src/main/java/com/google/android/exoplayer/DecoderInfo.java
View file @
87d0be25
...
...
@@ -29,7 +29,7 @@ public final class DecoderInfo {
public
final
String
name
;
/**
* Whether the decoder
is adaptive
.
* Whether the decoder
supports seamless resolution switches
.
*
* @see android.media.MediaCodecInfo.CodecCapabilities#isFeatureSupported(String)
* @see android.media.MediaCodecInfo.CodecCapabilities#FEATURE_AdaptivePlayback
...
...
library/src/main/java/com/google/android/exoplayer/ExoPlayerImplInternal.java
View file @
87d0be25
...
...
@@ -17,9 +17,9 @@ package com.google.android.exoplayer;
import
com.google.android.exoplayer.ExoPlayer.ExoPlayerComponent
;
import
com.google.android.exoplayer.util.Assertions
;
import
com.google.android.exoplayer.util.PriorityHandlerThread
;
import
com.google.android.exoplayer.util.TraceUtil
;
import
android.annotation.SuppressLint
;
import
android.os.Handler
;
import
android.os.HandlerThread
;
import
android.os.Looper
;
...
...
@@ -83,7 +83,6 @@ import java.util.List;
private
volatile
long
positionUs
;
private
volatile
long
bufferedPositionUs
;
@SuppressLint
(
"HandlerLeak"
)
public
ExoPlayerImplInternal
(
Handler
eventHandler
,
boolean
playWhenReady
,
boolean
[]
rendererEnabledFlags
,
int
minBufferMs
,
int
minRebufferMs
)
{
this
.
eventHandler
=
eventHandler
;
...
...
@@ -101,15 +100,10 @@ import java.util.List;
mediaClock
=
new
MediaClock
();
enabledRenderers
=
new
ArrayList
<
TrackRenderer
>(
rendererEnabledFlags
.
length
);
internalPlaybackThread
=
new
HandlerThread
(
getClass
().
getSimpleName
()
+
":Handler"
)
{
@Override
public
void
run
()
{
// Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can
// not normally change to this priority" is incorrect.
Process
.
setThreadPriority
(
Process
.
THREAD_PRIORITY_AUDIO
);
super
.
run
();
}
};
internalPlaybackThread
=
new
PriorityHandlerThread
(
getClass
().
getSimpleName
()
+
":Handler"
,
Process
.
THREAD_PRIORITY_AUDIO
);
internalPlaybackThread
.
start
();
handler
=
new
Handler
(
internalPlaybackThread
.
getLooper
(),
this
);
}
...
...
library/src/main/java/com/google/android/exoplayer/MediaCodecAudioTrackRenderer.java
View file @
87d0be25
...
...
@@ -15,28 +15,21 @@
*/
package
com
.
google
.
android
.
exoplayer
;
import
com.google.android.exoplayer.audio.AudioTrack
;
import
com.google.android.exoplayer.drm.DrmSessionManager
;
import
com.google.android.exoplayer.util.Assertions
;
import
com.google.android.exoplayer.util.MimeTypes
;
import
com.google.android.exoplayer.util.Util
;
import
android.annotation.TargetApi
;
import
android.media.AudioFormat
;
import
android.media.AudioManager
;
import
android.media.AudioTimestamp
;
import
android.media.AudioTrack
;
import
android.media.MediaCodec
;
import
android.media.MediaFormat
;
import
android.media.audiofx.Virtualizer
;
import
android.os.ConditionVariable
;
import
android.os.Handler
;
import
android.util.Log
;
import
java.lang.reflect.Method
;
import
java.nio.ByteBuffer
;
/**
* Decodes and renders audio using {@link MediaCodec} and {@link AudioTrack}.
* Decodes and renders audio using {@link MediaCodec} and {@link
android.media.
AudioTrack}.
*/
@TargetApi
(
16
)
public
class
MediaCodecAudioTrackRenderer
extends
MediaCodecTrackRenderer
{
...
...
@@ -52,26 +45,7 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
*
* @param e The corresponding exception.
*/
void
onAudioTrackInitializationError
(
AudioTrackInitializationException
e
);
}
/**
* Thrown when a failure occurs instantiating an audio track.
*/
public
static
class
AudioTrackInitializationException
extends
Exception
{
/**
* The state as reported by {@link AudioTrack#getState()}
*/
public
final
int
audioTrackState
;
public
AudioTrackInitializationException
(
int
audioTrackState
,
int
sampleRate
,
int
channelConfig
,
int
bufferSize
)
{
super
(
"AudioTrack init failed: "
+
audioTrackState
+
", Config("
+
sampleRate
+
", "
+
channelConfig
+
", "
+
bufferSize
+
")"
);
this
.
audioTrackState
=
audioTrackState
;
}
void
onAudioTrackInitializationError
(
AudioTrack
.
InitializationException
e
);
}
...
...
@@ -82,73 +56,12 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
*/
public
static
final
int
MSG_SET_VOLUME
=
1
;
/**
* The default multiplication factor used when determining the size of the underlying
* {@link AudioTrack}'s buffer.
*/
public
static
final
float
DEFAULT_MIN_BUFFER_MULTIPLICATION_FACTOR
=
4
;
private
static
final
String
TAG
=
"MediaCodecAudioTrackRenderer"
;
private
static
final
long
MICROS_PER_SECOND
=
1000000L
;
/**
* AudioTrack timestamps are deemed spurious if they are offset from the system clock by more
* than this amount.
* <p>
* This is a fail safe that should not be required on correctly functioning devices.
*/
private
static
final
long
MAX_AUDIO_TIMESTAMP_OFFSET_US
=
10
*
MICROS_PER_SECOND
;
/**
* AudioTrack latencies are deemed impossibly large if they are greater than this amount.
* <p>
* This is a fail safe that should not be required on correctly functioning devices.
*/
private
static
final
long
MAX_AUDIO_TRACK_LATENCY_US
=
10
*
MICROS_PER_SECOND
;
private
static
final
int
MAX_PLAYHEAD_OFFSET_COUNT
=
10
;
private
static
final
int
MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US
=
30000
;
private
static
final
int
MIN_TIMESTAMP_SAMPLE_INTERVAL_US
=
500000
;
private
static
final
int
START_NOT_SET
=
0
;
private
static
final
int
START_IN_SYNC
=
1
;
private
static
final
int
START_NEED_SYNC
=
2
;
private
final
EventListener
eventListener
;
private
final
ConditionVariable
audioTrackReleasingConditionVariable
;
private
final
AudioTimestampCompat
audioTimestampCompat
;
private
final
long
[]
playheadOffsets
;
private
final
float
minBufferMultiplicationFactor
;
private
int
nextPlayheadOffsetIndex
;
private
int
playheadOffsetCount
;
private
long
smoothedPlayheadOffsetUs
;
private
long
lastPlayheadSampleTimeUs
;
private
boolean
audioTimestampSet
;
private
long
lastTimestampSampleTimeUs
;
private
long
lastRawPlaybackHeadPosition
;
private
long
rawPlaybackHeadWrapCount
;
private
int
sampleRate
;
private
int
frameSize
;
private
int
channelConfig
;
private
int
minBufferSize
;
private
int
bufferSize
;
private
AudioTrack
audioTrack
;
private
Method
audioTrackGetLatencyMethod
;
private
final
AudioTrack
audioTrack
;
private
int
audioSessionId
;
private
long
submittedBytes
;
private
int
audioTrackStartMediaTimeState
;
private
long
audioTrackStartMediaTimeUs
;
private
long
audioTrackResumeSystemTimeUs
;
private
long
lastReportedCurrentPositionUs
;
private
long
audioTrackLatencyUs
;
private
float
volume
;
private
byte
[]
temporaryBuffer
;
private
int
temporaryBufferOffset
;
private
int
temporaryBufferSize
;
private
long
currentPositionUs
;
/**
* @param source The upstream source from which the renderer obtains samples.
...
...
@@ -198,15 +111,16 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
*/
public
MediaCodecAudioTrackRenderer
(
SampleSource
source
,
DrmSessionManager
drmSessionManager
,
boolean
playClearSamplesWithoutKeys
,
Handler
eventHandler
,
EventListener
eventListener
)
{
this
(
source
,
drmSessionManager
,
playClearSamplesWithoutKeys
,
DEFAULT_MIN_BUFFER_MULTIPLICATION_FACTOR
,
eventHandler
,
eventListener
);
this
(
source
,
drmSessionManager
,
playClearSamplesWithoutKeys
,
eventHandler
,
eventListener
,
new
AudioTrack
()
);
}
/**
* @param source The upstream source from which the renderer obtains samples.
* @param minBufferMultiplicationFactor When instantiating an underlying {@link AudioTrack},
* the size of the track's is calculated as this value multiplied by the minimum buffer size
* obtained from {@link AudioTrack#getMinBufferSize(int, int, int)}. The multiplication
* @param minBufferMultiplicationFactor When instantiating an underlying
* {@link android.media.AudioTrack}, the size of the track is calculated as this value
* multiplied by the minimum buffer size obtained from
* {@link android.media.AudioTrack#getMinBufferSize(int, int, int)}. The multiplication
* factor must be greater than or equal to 1.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
...
...
@@ -226,9 +140,10 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
* begin in parallel with key acquisision. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param minBufferMultiplicationFactor When instantiating an underlying {@link AudioTrack},
* the size of the track's is calculated as this value multiplied by the minimum buffer size
* obtained from {@link AudioTrack#getMinBufferSize(int, int, int)}. The multiplication
* @param minBufferMultiplicationFactor When instantiating an underlying
* {@link android.media.AudioTrack}, the size of the track is calculated as this value
* multiplied by the minimum buffer size obtained from
* {@link android.media.AudioTrack#getMinBufferSize(int, int, int)}. The multiplication
* factor must be greater than or equal to 1.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
...
...
@@ -237,25 +152,31 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
public
MediaCodecAudioTrackRenderer
(
SampleSource
source
,
DrmSessionManager
drmSessionManager
,
boolean
playClearSamplesWithoutKeys
,
float
minBufferMultiplicationFactor
,
Handler
eventHandler
,
EventListener
eventListener
)
{
this
(
source
,
drmSessionManager
,
playClearSamplesWithoutKeys
,
eventHandler
,
eventListener
,
new
AudioTrack
(
minBufferMultiplicationFactor
));
}
/**
* @param source The upstream source from which the renderer obtains samples.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required.
* @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
* For example a media file may start with a short clear region so as to allow playback to
* begin in parallel with key acquisision. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioTrack Used for playing back decoded audio samples.
*/
public
MediaCodecAudioTrackRenderer
(
SampleSource
source
,
DrmSessionManager
drmSessionManager
,
boolean
playClearSamplesWithoutKeys
,
Handler
eventHandler
,
EventListener
eventListener
,
AudioTrack
audioTrack
)
{
super
(
source
,
drmSessionManager
,
playClearSamplesWithoutKeys
,
eventHandler
,
eventListener
);
Assertions
.
checkState
(
minBufferMultiplicationFactor
>=
1
);
this
.
minBufferMultiplicationFactor
=
minBufferMultiplicationFactor
;
this
.
eventListener
=
eventListener
;
audioTrackReleasingConditionVariable
=
new
ConditionVariable
(
true
);
if
(
Util
.
SDK_INT
>=
19
)
{
audioTimestampCompat
=
new
AudioTimestampCompatV19
();
}
else
{
audioTimestampCompat
=
new
NoopAudioTimestampCompat
();
}
if
(
Util
.
SDK_INT
>=
18
)
{
try
{
audioTrackGetLatencyMethod
=
AudioTrack
.
class
.
getMethod
(
"getLatency"
,
(
Class
<?>[])
null
);
}
catch
(
NoSuchMethodException
e
)
{
// There's no guarantee this method exists. Do nothing.
}
}
playheadOffsets
=
new
long
[
MAX_PLAYHEAD_OFFSET_COUNT
];
volume
=
1.0f
;
this
.
audioTrack
=
Assertions
.
checkNotNull
(
audioTrack
);
this
.
audioSessionId
=
AudioTrack
.
SESSION_ID_NOT_SET
;
}
@Override
...
...
@@ -271,104 +192,12 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
@Override
protected
void
onEnabled
(
long
positionUs
,
boolean
joining
)
{
super
.
onEnabled
(
positionUs
,
joining
);
lastReportedCurrentPositionUs
=
Long
.
MIN_VALUE
;
}
@Override
protected
void
doSomeWork
(
long
positionUs
,
long
elapsedRealtimeUs
)
throws
ExoPlaybackException
{
super
.
doSomeWork
(
positionUs
,
elapsedRealtimeUs
);
maybeSampleSyncParams
();
currentPositionUs
=
Long
.
MIN_VALUE
;
}
@Override
protected
void
onOutputFormatChanged
(
MediaFormat
format
)
{
int
channelCount
=
format
.
getInteger
(
MediaFormat
.
KEY_CHANNEL_COUNT
);
int
channelConfig
;
switch
(
channelCount
)
{
case
1
:
channelConfig
=
AudioFormat
.
CHANNEL_OUT_MONO
;
break
;
case
2
:
channelConfig
=
AudioFormat
.
CHANNEL_OUT_STEREO
;
break
;
case
6
:
channelConfig
=
AudioFormat
.
CHANNEL_OUT_5POINT1
;
break
;
case
8
:
channelConfig
=
AudioFormat
.
CHANNEL_OUT_7POINT1
;
break
;
default
:
throw
new
IllegalArgumentException
(
"Unsupported channel count: "
+
channelCount
);
}
int
sampleRate
=
format
.
getInteger
(
MediaFormat
.
KEY_SAMPLE_RATE
);
if
(
audioTrack
!=
null
&&
this
.
sampleRate
==
sampleRate
&&
this
.
channelConfig
==
channelConfig
)
{
// We already have an existing audio track with the correct sample rate and channel config.
return
;
}
releaseAudioTrack
();
this
.
sampleRate
=
sampleRate
;
this
.
channelConfig
=
channelConfig
;
this
.
minBufferSize
=
AudioTrack
.
getMinBufferSize
(
sampleRate
,
channelConfig
,
AudioFormat
.
ENCODING_PCM_16BIT
);
this
.
bufferSize
=
(
int
)
(
minBufferMultiplicationFactor
*
minBufferSize
);
this
.
frameSize
=
2
*
channelCount
;
// 2 bytes per 16 bit sample * number of channels.
}
private
void
initAudioTrack
()
throws
ExoPlaybackException
{
// If we're asynchronously releasing a previous audio track then we block until it has been
// released. This guarantees that we cannot end up in a state where we have multiple audio
// track instances. Without this guarantee it would be possible, in extreme cases, to exhaust
// the shared memory that's available for audio track buffers. This would in turn cause the
// initialization of the audio track to fail.
audioTrackReleasingConditionVariable
.
block
();
if
(
audioSessionId
==
0
)
{
audioTrack
=
new
AudioTrack
(
AudioManager
.
STREAM_MUSIC
,
sampleRate
,
channelConfig
,
AudioFormat
.
ENCODING_PCM_16BIT
,
bufferSize
,
AudioTrack
.
MODE_STREAM
);
checkAudioTrackInitialized
();
audioSessionId
=
audioTrack
.
getAudioSessionId
();
onAudioSessionId
(
audioSessionId
);
}
else
{
// Re-attach to the same audio session.
audioTrack
=
new
AudioTrack
(
AudioManager
.
STREAM_MUSIC
,
sampleRate
,
channelConfig
,
AudioFormat
.
ENCODING_PCM_16BIT
,
bufferSize
,
AudioTrack
.
MODE_STREAM
,
audioSessionId
);
checkAudioTrackInitialized
();
}
setVolume
(
volume
);
if
(
getState
()
==
TrackRenderer
.
STATE_STARTED
)
{
audioTrackResumeSystemTimeUs
=
System
.
nanoTime
()
/
1000
;
audioTrack
.
play
();
}
}
/**
* Checks that {@link #audioTrack} has been successfully initialized. If it has then calling this
* method is a no-op. If it hasn't then {@link #audioTrack} is released and set to null, and an
* exception is thrown.
*
* @throws ExoPlaybackException If {@link #audioTrack} has not been successfully initialized.
*/
private
void
checkAudioTrackInitialized
()
throws
ExoPlaybackException
{
int
audioTrackState
=
audioTrack
.
getState
();
if
(
audioTrackState
==
AudioTrack
.
STATE_INITIALIZED
)
{
return
;
}
// The track is not successfully initialized. Release and null the track.
try
{
audioTrack
.
release
();
}
catch
(
Exception
e
)
{
// The track has already failed to initialize, so it wouldn't be that surprising if release
// were to fail too. Swallow the exception.
}
finally
{
audioTrack
=
null
;
}
// Propagate the relevant exceptions.
AudioTrackInitializationException
exception
=
new
AudioTrackInitializationException
(
audioTrackState
,
sampleRate
,
channelConfig
,
bufferSize
);
notifyAudioTrackInitializationError
(
exception
);
throw
new
ExoPlaybackException
(
exception
);
audioTrack
.
reconfigure
(
format
);
}
/**
...
...
@@ -387,51 +216,15 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
// Do nothing.
}
private
void
releaseAudioTrack
()
{
if
(
audioTrack
!=
null
)
{
submittedBytes
=
0
;
temporaryBufferSize
=
0
;
lastRawPlaybackHeadPosition
=
0
;
rawPlaybackHeadWrapCount
=
0
;
audioTrackStartMediaTimeUs
=
0
;
audioTrackStartMediaTimeState
=
START_NOT_SET
;
resetSyncParams
();
int
playState
=
audioTrack
.
getPlayState
();
if
(
playState
==
AudioTrack
.
PLAYSTATE_PLAYING
)
{
audioTrack
.
pause
();
}
// AudioTrack.release can take some time, so we call it on a background thread.
final
AudioTrack
toRelease
=
audioTrack
;
audioTrack
=
null
;
audioTrackReleasingConditionVariable
.
close
();
new
Thread
()
{
@Override
public
void
run
()
{
try
{
toRelease
.
release
();
}
finally
{
audioTrackReleasingConditionVariable
.
open
();
}
}
}.
start
();
}
}
@Override
protected
void
onStarted
()
{
super
.
onStarted
();
if
(
audioTrack
!=
null
)
{
audioTrackResumeSystemTimeUs
=
System
.
nanoTime
()
/
1000
;
audioTrack
.
play
();
}
}
@Override
protected
void
onStopped
()
{
if
(
audioTrack
!=
null
)
{
resetSyncParams
();
audioTrack
.
pause
();
}
super
.
onStopped
();
}
...
...
@@ -439,149 +232,34 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
protected
boolean
isEnded
()
{
// We've exhausted the output stream, and the AudioTrack has either played all of the data
// submitted, or has been fed insufficient data to begin playback.
return
super
.
isEnded
()
&&
(
getPendingFrameCount
()
==
0
||
submittedBytes
<
minBufferSize
);
return
super
.
isEnded
()
&&
(!
audioTrack
.
hasPendingData
()
||
!
audioTrack
.
hasEnoughDataToBeginPlayback
());
}
@Override
protected
boolean
isReady
()
{
return
getPendingFrameCount
()
>
0
return
audioTrack
.
hasPendingData
()
||
(
super
.
isReady
()
&&
getSourceState
()
==
SOURCE_STATE_READY_READ_MAY_FAIL
);
}
/**
* This method uses a variety of techniques to compute the current position:
*
* 1. Prior to playback having started, calls up to the super class to obtain the pending seek
* position.
* 2. During playback, uses AudioTimestamps obtained from AudioTrack.getTimestamp on supported
* devices.
* 3. Else, derives a smoothed position by sampling the AudioTrack's frame position.
*/
@Override
protected
long
getCurrentPositionUs
()
{
long
systemClockUs
=
System
.
nanoTime
()
/
1000
;
long
currentPositionUs
;
if
(
audioTrack
==
null
||
audioTrackStartMediaTimeState
==
START_NOT_SET
)
{
// The AudioTrack hasn't started.
currentPositionUs
=
super
.
getCurrentPositionUs
();
}
else
if
(
audioTimestampSet
)
{
// How long ago in the past the audio timestamp is (negative if it's in the future)
long
presentationDiff
=
systemClockUs
-
(
audioTimestampCompat
.
getNanoTime
()
/
1000
);
long
framesDiff
=
durationUsToFrames
(
presentationDiff
);
// The position of the frame that's currently being presented.
long
currentFramePosition
=
audioTimestampCompat
.
getFramePosition
()
+
framesDiff
;
currentPositionUs
=
framesToDurationUs
(
currentFramePosition
)
+
audioTrackStartMediaTimeUs
;
}
else
{
if
(
playheadOffsetCount
==
0
)
{
// The AudioTrack has started, but we don't have any samples to compute a smoothed position.
currentPositionUs
=
getPlayheadPositionUs
()
+
audioTrackStartMediaTimeUs
;
long
audioTrackCurrentPositionUs
=
audioTrack
.
getCurrentPositionUs
(
isEnded
());
if
(
audioTrackCurrentPositionUs
==
AudioTrack
.
CURRENT_POSITION_NOT_SET
)
{
// Use the super class position before audio playback starts.
currentPositionUs
=
Math
.
max
(
currentPositionUs
,
super
.
getCurrentPositionUs
());
}
else
{
// getPlayheadPositionUs() only has a granularity of ~20ms, so we base the position off the
// system clock (and a smoothed offset between it and the playhead position) so as to
// prevent jitter in the reported positions.
currentPositionUs
=
systemClockUs
+
smoothedPlayheadOffsetUs
+
audioTrackStartMediaTimeUs
;
}
if
(!
isEnded
())
{
currentPositionUs
-=
audioTrackLatencyUs
;
// Make sure we don't ever report time moving backwards.
currentPositionUs
=
Math
.
max
(
currentPositionUs
,
audioTrackCurrentPositionUs
);
}
}
// Make sure we don't ever report time moving backwards as a result of smoothing or switching
// between the various code paths above.
currentPositionUs
=
Math
.
max
(
lastReportedCurrentPositionUs
,
currentPositionUs
);
lastReportedCurrentPositionUs
=
currentPositionUs
;
return
currentPositionUs
;
}
private
void
maybeSampleSyncParams
()
{
if
(
audioTrack
==
null
||
audioTrackStartMediaTimeState
==
START_NOT_SET
||
getState
()
!=
STATE_STARTED
)
{
// The AudioTrack isn't playing.
return
;
}
long
playheadPositionUs
=
getPlayheadPositionUs
();
if
(
playheadPositionUs
==
0
)
{
// The AudioTrack hasn't output anything yet.
return
;
}
long
systemClockUs
=
System
.
nanoTime
()
/
1000
;
if
(
systemClockUs
-
lastPlayheadSampleTimeUs
>=
MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US
)
{
// Take a new sample and update the smoothed offset between the system clock and the playhead.
playheadOffsets
[
nextPlayheadOffsetIndex
]
=
playheadPositionUs
-
systemClockUs
;
nextPlayheadOffsetIndex
=
(
nextPlayheadOffsetIndex
+
1
)
%
MAX_PLAYHEAD_OFFSET_COUNT
;
if
(
playheadOffsetCount
<
MAX_PLAYHEAD_OFFSET_COUNT
)
{
playheadOffsetCount
++;
}
lastPlayheadSampleTimeUs
=
systemClockUs
;
smoothedPlayheadOffsetUs
=
0
;
for
(
int
i
=
0
;
i
<
playheadOffsetCount
;
i
++)
{
smoothedPlayheadOffsetUs
+=
playheadOffsets
[
i
]
/
playheadOffsetCount
;
}
}
if
(
systemClockUs
-
lastTimestampSampleTimeUs
>=
MIN_TIMESTAMP_SAMPLE_INTERVAL_US
)
{
audioTimestampSet
=
audioTimestampCompat
.
update
(
audioTrack
);
if
(
audioTimestampSet
)
{
// Perform sanity checks on the timestamp.
long
audioTimestampUs
=
audioTimestampCompat
.
getNanoTime
()
/
1000
;
if
(
audioTimestampUs
<
audioTrackResumeSystemTimeUs
)
{
// The timestamp corresponds to a time before the track was most recently resumed.
audioTimestampSet
=
false
;
}
else
if
(
Math
.
abs
(
audioTimestampUs
-
systemClockUs
)
>
MAX_AUDIO_TIMESTAMP_OFFSET_US
)
{
// The timestamp time base is probably wrong.
audioTimestampSet
=
false
;
Log
.
w
(
TAG
,
"Spurious audio timestamp: "
+
audioTimestampCompat
.
getFramePosition
()
+
", "
+
audioTimestampUs
+
", "
+
systemClockUs
);
}
}
if
(
audioTrackGetLatencyMethod
!=
null
)
{
try
{
// Compute the audio track latency, excluding the latency due to the buffer (leaving
// latency due to the mixer and audio hardware driver).
audioTrackLatencyUs
=
(
Integer
)
audioTrackGetLatencyMethod
.
invoke
(
audioTrack
,
(
Object
[])
null
)
*
1000L
-
framesToDurationUs
(
bufferSize
/
frameSize
);
// Sanity check that the latency is non-negative.
audioTrackLatencyUs
=
Math
.
max
(
audioTrackLatencyUs
,
0
);
// Sanity check that the latency isn't too large.
if
(
audioTrackLatencyUs
>
MAX_AUDIO_TRACK_LATENCY_US
)
{
Log
.
w
(
TAG
,
"Ignoring impossibly large audio latency: "
+
audioTrackLatencyUs
);
audioTrackLatencyUs
=
0
;
}
}
catch
(
Exception
e
)
{
// The method existed, but doesn't work. Don't try again.
audioTrackGetLatencyMethod
=
null
;
}
}
lastTimestampSampleTimeUs
=
systemClockUs
;
}
}
private
void
resetSyncParams
()
{
smoothedPlayheadOffsetUs
=
0
;
playheadOffsetCount
=
0
;
nextPlayheadOffsetIndex
=
0
;
lastPlayheadSampleTimeUs
=
0
;
audioTimestampSet
=
false
;
lastTimestampSampleTimeUs
=
0
;
}
private
long
getPlayheadPositionUs
()
{
return
framesToDurationUs
(
getPlaybackHeadPosition
());
}
private
long
framesToDurationUs
(
long
frameCount
)
{
return
(
frameCount
*
MICROS_PER_SECOND
)
/
sampleRate
;
}
private
long
durationUsToFrames
(
long
durationUs
)
{
return
(
durationUs
*
sampleRate
)
/
MICROS_PER_SECOND
;
}
@Override
protected
void
onDisabled
()
{
audioSessionId
=
0
;
audioSessionId
=
AudioTrack
.
SESSION_ID_NOT_SET
;
try
{
releaseAudioTrack
();
audioTrack
.
reset
();
}
finally
{
super
.
onDisabled
();
}
...
...
@@ -591,8 +269,8 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
protected
void
seekTo
(
long
positionUs
)
throws
ExoPlaybackException
{
super
.
seekTo
(
positionUs
);
// TODO: Try and re-use the same AudioTrack instance once [redacted] is fixed.
releaseAudioTrack
();
lastReportedC
urrentPositionUs
=
Long
.
MIN_VALUE
;
audioTrack
.
reset
();
c
urrentPositionUs
=
Long
.
MIN_VALUE
;
}
@Override
...
...
@@ -602,74 +280,39 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
if
(
shouldSkip
)
{
codec
.
releaseOutputBuffer
(
bufferIndex
,
false
);
codecCounters
.
skippedOutputBufferCount
++;
if
(
audioTrackStartMediaTimeState
==
START_IN_SYNC
)
{
// Skipping the sample will push track time out of sync. We'll need to sync again.
audioTrackStartMediaTimeState
=
START_NEED_SYNC
;
}
audioTrack
.
handleDiscontinuity
();
return
true
;
}
if
(
temporaryBufferSize
==
0
)
{
// This is the first time we've seen this {@code buffer}.
// Note: presentationTimeUs corresponds to the end of the sample, not the start.
long
bufferStartTime
=
bufferInfo
.
presentationTimeUs
-
framesToDurationUs
(
bufferInfo
.
size
/
frameSize
);
if
(
audioTrackStartMediaTimeState
==
START_NOT_SET
)
{
audioTrackStartMediaTimeUs
=
Math
.
max
(
0
,
bufferStartTime
);
audioTrackStartMediaTimeState
=
START_IN_SYNC
;
// Initialize and start the audio track now.
if
(!
audioTrack
.
isInitialized
())
{
try
{
if
(
audioSessionId
!=
AudioTrack
.
SESSION_ID_NOT_SET
)
{
audioTrack
.
initialize
(
audioSessionId
);
}
else
{
// Sanity check that bufferStartTime is consistent with the expected value.
long
expectedBufferStartTime
=
audioTrackStartMediaTimeUs
+
framesToDurationUs
(
submittedBytes
/
frameSize
);
if
(
audioTrackStartMediaTimeState
==
START_IN_SYNC
&&
Math
.
abs
(
expectedBufferStartTime
-
bufferStartTime
)
>
200000
)
{
Log
.
e
(
TAG
,
"Discontinuity detected [expected "
+
expectedBufferStartTime
+
", got "
+
bufferStartTime
+
"]"
);
audioTrackStartMediaTimeState
=
START_NEED_SYNC
;
}
if
(
audioTrackStartMediaTimeState
==
START_NEED_SYNC
)
{
// Adjust audioTrackStartMediaTimeUs to be consistent with the current buffer's start
// time and the number of bytes submitted. Also reset lastReportedCurrentPositionUs to
// allow time to jump backwards if it really wants to.
audioTrackStartMediaTimeUs
+=
(
bufferStartTime
-
expectedBufferStartTime
);
audioTrackStartMediaTimeState
=
START_IN_SYNC
;
lastReportedCurrentPositionUs
=
Long
.
MIN_VALUE
;
audioSessionId
=
audioTrack
.
initialize
();
onAudioSessionId
(
audioSessionId
);
}
}
catch
(
AudioTrack
.
InitializationException
e
)
{
notifyAudioTrackInitializationError
(
e
);
throw
new
ExoPlaybackException
(
e
);
}
temporaryBufferSize
=
bufferInfo
.
size
;
buffer
.
position
(
bufferInfo
.
offset
);
if
(
Util
.
SDK_INT
<
21
)
{
// Copy {@code buffer} into {@code temporaryBuffer}.
if
(
temporaryBuffer
==
null
||
temporaryBuffer
.
length
<
bufferInfo
.
size
)
{
temporaryBuffer
=
new
byte
[
bufferInfo
.
size
];
}
buffer
.
get
(
temporaryBuffer
,
0
,
bufferInfo
.
size
);
temporaryBufferOffset
=
0
;
if
(
getState
()
==
TrackRenderer
.
STATE_STARTED
)
{
audioTrack
.
play
();
}
}
if
(
audioTrack
==
null
)
{
initAudioTrack
();
}
int
handleBufferResult
=
audioTrack
.
handleBuffer
(
buffer
,
bufferInfo
.
offset
,
bufferInfo
.
size
,
bufferInfo
.
presentationTimeUs
);
int
bytesWritten
=
0
;
if
(
Util
.
SDK_INT
<
21
)
{
// Work out how many bytes we can write without the risk of blocking.
int
bytesPending
=
(
int
)
(
submittedBytes
-
getPlaybackHeadPosition
()
*
frameSize
);
int
bytesToWrite
=
bufferSize
-
bytesPending
;
if
(
bytesToWrite
>
0
)
{
bytesToWrite
=
Math
.
min
(
temporaryBufferSize
,
bytesToWrite
);
bytesWritten
=
audioTrack
.
write
(
temporaryBuffer
,
temporaryBufferOffset
,
bytesToWrite
);
temporaryBufferOffset
+=
bytesWritten
;
}
}
else
{
bytesWritten
=
writeNonBlockingV21
(
audioTrack
,
buffer
,
temporaryBufferSize
);
// If we are out of sync, allow currentPositionUs to jump backwards.
if
((
handleBufferResult
&
AudioTrack
.
RESULT_POSITION_DISCONTINUITY
)
!=
0
)
{
currentPositionUs
=
Long
.
MIN_VALUE
;
}
temporaryBufferSize
-=
bytesWritten
;
submittedBytes
+=
bytesWritten
;
if
(
temporaryBufferSize
==
0
)
{
// Release the buffer if it was consumed.
if
((
handleBufferResult
&
AudioTrack
.
RESULT_BUFFER_CONSUMED
)
!=
0
)
{
codec
.
releaseOutputBuffer
(
bufferIndex
,
false
);
codecCounters
.
renderedOutputBufferCount
++;
return
true
;
...
...
@@ -678,66 +321,16 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
return
false
;
}
/**
 * Writes up to {@code size} bytes from {@code buffer} to {@code audioTrack} without blocking,
 * using the non-blocking write mode added in SDK 21.
 *
 * @return The number of bytes actually written.
 */
@TargetApi(21)
private static int writeNonBlockingV21(AudioTrack audioTrack, ByteBuffer buffer, int size) {
  int bytesWritten = audioTrack.write(buffer, size, AudioTrack.WRITE_NON_BLOCKING);
  return bytesWritten;
}
/**
* {@link AudioTrack#getPlaybackHeadPosition()} returns a value intended to be interpreted as
* an unsigned 32 bit integer, which also wraps around periodically. This method returns the
* playback head position as a long that will only wrap around if the value exceeds
* {@link Long#MAX_VALUE} (which in practice will never happen).
*
* @return {@link AudioTrack#getPlaybackHeadPosition()} of {@link #audioTrack} expressed as a
* long.
*/
private
long
getPlaybackHeadPosition
()
{
long
rawPlaybackHeadPosition
=
0xFFFFFFFF
L
&
audioTrack
.
getPlaybackHeadPosition
();
if
(
lastRawPlaybackHeadPosition
>
rawPlaybackHeadPosition
)
{
// The value must have wrapped around.
rawPlaybackHeadWrapCount
++;
}
lastRawPlaybackHeadPosition
=
rawPlaybackHeadPosition
;
return
rawPlaybackHeadPosition
+
(
rawPlaybackHeadWrapCount
<<
32
);
}
/**
 * Returns the number of frames that have been submitted to the track but not yet played out, or
 * zero if there is no audio track.
 */
private int getPendingFrameCount() {
  if (audioTrack == null) {
    return 0;
  }
  long submittedFrames = submittedBytes / frameSize;
  return (int) (submittedFrames - getPlaybackHeadPosition());
}
/**
 * Handles renderer messages; {@link #MSG_SET_VOLUME} updates the playback volume, anything else
 * is delegated to the superclass.
 *
 * @param messageType The type of the message.
 * @param message The message payload ({@code Float} volume for {@link #MSG_SET_VOLUME}).
 * @throws ExoPlaybackException If an error occurs handling the message in the superclass.
 */
@Override
public void handleMessage(int messageType, Object message) throws ExoPlaybackException {
  if (messageType == MSG_SET_VOLUME) {
    // setVolume stores the value and applies it to the audio track if one exists, dispatching
    // to the correct platform API for the SDK level. The previous additional direct
    // audioTrack.setVolume call was redundant, skipped the pre-21 stereo-volume path, and
    // could throw a NullPointerException before the track was initialized.
    setVolume((Float) message);
  } else {
    super.handleMessage(messageType, message);
  }
}
/** Stores {@code volume} and, if a track exists, applies it via the SDK-appropriate API. */
private void setVolume(float volume) {
  this.volume = volume;
  if (audioTrack == null) {
    // No track yet; the stored volume is applied when one is created.
    return;
  }
  // The float-volume API exists from API 21; older releases only offer the stereo-volume API.
  if (Util.SDK_INT >= 21) {
    setVolumeV21(audioTrack, volume);
  } else {
    setVolumeV3(audioTrack, volume);
  }
}
/** Applies {@code volume} to {@code audioTrack} using the single-gain API added in SDK 21. */
@TargetApi(21)
private static void setVolumeV21(AudioTrack audioTrack, float volume) {
  audioTrack.setVolume(volume);
}
/**
 * Applies {@code volume} on pre-21 devices via the deprecated stereo-volume API, using the same
 * gain for the left and right channels.
 */
@SuppressWarnings("deprecation")
private static void setVolumeV3(AudioTrack audioTrack, float volume) {
  audioTrack.setStereoVolume(volume, volume);
}
private
void
notifyAudioTrackInitializationError
(
final
AudioTrackInitializationException
e
)
{
private
void
notifyAudioTrackInitializationError
(
final
AudioTrack
.
InitializationException
e
)
{
if
(
eventHandler
!=
null
&&
eventListener
!=
null
)
{
eventHandler
.
post
(
new
Runnable
()
{
@Override
...
...
@@ -748,74 +341,4 @@ public class MediaCodecAudioTrackRenderer extends MediaCodecTrackRenderer {
}
}
/**
 * Interface exposing the {@link AudioTimestamp} methods we need that were added in SDK 19.
 */
private interface AudioTimestampCompat {

  /**
   * Returns true if the audioTimestamp was retrieved from the audioTrack.
   */
  boolean update(AudioTrack audioTrack);

  /** Returns the {@link AudioTimestamp#nanoTime} captured by the last successful update. */
  long getNanoTime();

  /** Returns the {@link AudioTimestamp#framePosition} captured by the last successful update. */
  long getFramePosition();

}
/**
 * The AudioTimestampCompat implementation for SDK < 19 that does nothing or throws an exception.
 */
private static final class NoopAudioTimestampCompat implements AudioTimestampCompat {

  @Override
  public boolean update(AudioTrack audioTrack) {
    // Timestamps are unsupported before SDK 19, so report that none was retrieved.
    return false;
  }

  @Override
  public long getNanoTime() {
    // Should never be called, since update() always returns false.
    throw new UnsupportedOperationException();
  }

  @Override
  public long getFramePosition() {
    // Should never be called, since update() always returns false.
    throw new UnsupportedOperationException();
  }

}
/**
 * The AudioTimestampCompat implementation for SDK >= 19 that simply calls through to the actual
 * implementations added in SDK 19.
 */
@TargetApi(19)
private static final class AudioTimestampCompatV19 implements AudioTimestampCompat {

  // Reusable holder populated in place by each call to update().
  private final AudioTimestamp timestamp;

  public AudioTimestampCompatV19() {
    timestamp = new AudioTimestamp();
  }

  @Override
  public boolean update(AudioTrack audioTrack) {
    return audioTrack.getTimestamp(timestamp);
  }

  @Override
  public long getNanoTime() {
    return timestamp.nanoTime;
  }

  @Override
  public long getFramePosition() {
    return timestamp.framePosition;
  }

}
}
library/src/main/java/com/google/android/exoplayer/MediaCodecTrackRenderer.java
View file @
87d0be25
...
...
@@ -280,11 +280,9 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
}
}
DecoderInfo
selectedDecoderInfo
=
MediaCodecUtil
.
getDecoderInfo
(
mimeType
);
DecoderInfo
selectedDecoderInfo
=
MediaCodecUtil
.
getDecoderInfo
(
mimeType
,
requiresSecureDecoder
);
String
selectedDecoderName
=
selectedDecoderInfo
.
name
;
if
(
requiresSecureDecoder
)
{
selectedDecoderName
=
getSecureDecoderName
(
selectedDecoderName
);
}
codecIsAdaptive
=
selectedDecoderInfo
.
adaptive
;
try
{
codec
=
MediaCodec
.
createByCodecName
(
selectedDecoderName
);
...
...
@@ -765,13 +763,6 @@ public abstract class MediaCodecTrackRenderer extends TrackRenderer {
MediaCodec
codec
,
ByteBuffer
buffer
,
MediaCodec
.
BufferInfo
bufferInfo
,
int
bufferIndex
,
boolean
shouldSkip
)
throws
ExoPlaybackException
;
/**
 * Returns the name of the secure variant of a given decoder.
 */
private static String getSecureDecoderName(String rawDecoderName) {
  String secureSuffix = ".secure";
  return rawDecoderName + secureSuffix;
}
private
void
notifyDecoderInitializationError
(
final
DecoderInitializationException
e
)
{
if
(
eventHandler
!=
null
&&
eventListener
!=
null
)
{
eventHandler
.
post
(
new
Runnable
()
{
...
...
library/src/main/java/com/google/android/exoplayer/MediaCodecUtil.java
View file @
87d0be25
...
...
@@ -23,6 +23,7 @@ import android.media.MediaCodecInfo;
import
android.media.MediaCodecInfo.CodecCapabilities
;
import
android.media.MediaCodecInfo.CodecProfileLevel
;
import
android.media.MediaCodecList
;
import
android.text.TextUtils
;
import
android.util.Pair
;
import
java.util.HashMap
;
...
...
@@ -33,60 +34,79 @@ import java.util.HashMap;
@TargetApi
(
16
)
public
class
MediaCodecUtil
{
private
static
final
HashMap
<
String
,
Pair
<
MediaCodecInfo
,
CodecCapabilities
>>
codecs
=
new
HashMap
<
String
,
Pair
<
MediaCodecInfo
,
CodecCapabilities
>>();
private
static
final
HashMap
<
CodecKey
,
Pair
<
String
,
CodecCapabilities
>>
codecs
=
new
HashMap
<
CodecKey
,
Pair
<
String
,
CodecCapabilities
>>();
/**
* Get information about the decoder that will be used for a given mime type. If no decoder
* exists for the mime type then null is returned.
* Get information about the decoder that will be used for a given mime type.
*
* @param mimeType The mime type.
* @param secure Whether the decoder is required to support secure decryption. Always pass false
* unless secure decryption really is required.
* @return Information about the decoder that will be used, or null if no decoder exists.
*/
public
static
DecoderInfo
getDecoderInfo
(
String
mimeType
)
{
Pair
<
MediaCodecInfo
,
CodecCapabilities
>
info
=
getMediaCodecInfo
(
mimeTyp
e
);
public
static
DecoderInfo
getDecoderInfo
(
String
mimeType
,
boolean
secure
)
{
Pair
<
String
,
CodecCapabilities
>
info
=
getMediaCodecInfo
(
mimeType
,
secur
e
);
if
(
info
==
null
)
{
return
null
;
}
return
new
DecoderInfo
(
info
.
first
.
getName
()
,
isAdaptive
(
info
.
second
));
return
new
DecoderInfo
(
info
.
first
,
isAdaptive
(
info
.
second
));
}
/**
* Optional call to warm the codec cache. Call from any appropriate
* place to hide latency.
* Optional call to warm the codec cache for a given mime type.
* <p>
* Calling this method may speed up subsequent calls to {@link #getDecoderInfo(String, boolean)}.
*
* @param mimeType The mime type.
* @param secure Whether the decoder is required to support secure decryption. Always pass false
* unless secure decryption really is required.
*/
public
static
synchronized
void
warmCodecs
(
String
[]
mimeTypes
)
{
for
(
int
i
=
0
;
i
<
mimeTypes
.
length
;
i
++)
{
getMediaCodecInfo
(
mimeTypes
[
i
]);
}
public
static
synchronized
void
warmCodec
(
String
mimeType
,
boolean
secure
)
{
getMediaCodecInfo
(
mimeType
,
secure
);
}
/**
* Returns the best decoder and its capabilities for the given mimeType. If there's no decoder
* returns null.
*
* TODO: We need to use the new object based MediaCodecList API.
* Returns the name of the best decoder and its capabilities for the given mimeType.
*/
@SuppressWarnings
(
"deprecation"
)
private
static
synchronized
Pair
<
MediaCodecInfo
,
CodecCapabilities
>
getMediaCodecInfo
(
String
mimeType
)
{
Pair
<
MediaCodecInfo
,
CodecCapabilities
>
result
=
codecs
.
get
(
mimeType
);
if
(
result
!=
null
)
{
return
result
;
private
static
synchronized
Pair
<
String
,
CodecCapabilities
>
getMediaCodecInfo
(
String
mimeType
,
boolean
secure
)
{
CodecKey
key
=
new
CodecKey
(
mimeType
,
secure
);
if
(
codecs
.
containsKey
(
key
))
{
return
codecs
.
get
(
key
);
}
int
numberOfCodecs
=
MediaCodecList
.
getCodecCount
();
MediaCodecListCompat
mediaCodecList
=
Util
.
SDK_INT
>=
21
?
new
MediaCodecListCompatV21
(
secure
)
:
new
MediaCodecListCompatV16
();
int
numberOfCodecs
=
mediaCodecList
.
getCodecCount
();
boolean
secureDecodersExplicit
=
mediaCodecList
.
secureDecodersExplicit
();
// Note: MediaCodecList is sorted by the framework such that the best decoders come first.
for
(
int
i
=
0
;
i
<
numberOfCodecs
;
i
++)
{
MediaCodecInfo
info
=
M
ediaCodecList
.
getCodecInfoAt
(
i
);
MediaCodecInfo
info
=
m
ediaCodecList
.
getCodecInfoAt
(
i
);
String
codecName
=
info
.
getName
();
if
(!
info
.
isEncoder
()
&&
codecName
.
startsWith
(
"OMX."
)
&&
!
codecName
.
endsWith
(
".secure"
))
{
if
(!
info
.
isEncoder
()
&&
codecName
.
startsWith
(
"OMX."
)
&&
(
secureDecodersExplicit
||
!
codecName
.
endsWith
(
".secure"
)))
{
String
[]
supportedTypes
=
info
.
getSupportedTypes
();
for
(
int
j
=
0
;
j
<
supportedTypes
.
length
;
j
++)
{
String
supportedType
=
supportedTypes
[
j
];
if
(
supportedType
.
equalsIgnoreCase
(
mimeType
))
{
result
=
Pair
.
create
(
info
,
info
.
getCapabilitiesForType
(
supportedType
));
codecs
.
put
(
mimeType
,
result
);
return
result
;
CodecCapabilities
capabilities
=
info
.
getCapabilitiesForType
(
supportedType
);
if
(!
secureDecodersExplicit
)
{
// Cache variants for secure and insecure playback. Note that the secure decoder is
// inferred, and may not actually exist.
codecs
.
put
(
key
.
secure
?
new
CodecKey
(
mimeType
,
false
)
:
key
,
Pair
.
create
(
codecName
,
capabilities
));
codecs
.
put
(
key
.
secure
?
key
:
new
CodecKey
(
mimeType
,
true
),
Pair
.
create
(
codecName
+
".secure"
,
capabilities
));
}
else
{
// We can only cache this variant. The other should be listed explicitly.
boolean
codecSecure
=
mediaCodecList
.
isSecurePlaybackSupported
(
info
.
getCapabilitiesForType
(
supportedType
));
codecs
.
put
(
key
.
secure
==
codecSecure
?
key
:
new
CodecKey
(
mimeType
,
codecSecure
),
Pair
.
create
(
codecName
,
capabilities
));
}
if
(
codecs
.
containsKey
(
key
))
{
return
codecs
.
get
(
key
);
}
}
}
}
...
...
@@ -113,7 +133,7 @@ public class MediaCodecUtil {
* @return Whether the specified profile is supported at the specified level.
*/
public
static
boolean
isH264ProfileSupported
(
int
profile
,
int
level
)
{
Pair
<
MediaCodecInfo
,
CodecCapabilities
>
info
=
getMediaCodecInfo
(
MimeTypes
.
VIDEO_H264
);
Pair
<
String
,
CodecCapabilities
>
info
=
getMediaCodecInfo
(
MimeTypes
.
VIDEO_H264
,
false
);
if
(
info
==
null
)
{
return
false
;
}
...
...
@@ -133,7 +153,7 @@ public class MediaCodecUtil {
* @return the maximum frame size for an H264 stream that can be decoded on the device.
*/
public
static
int
maxH264DecodableFrameSize
()
{
Pair
<
MediaCodecInfo
,
CodecCapabilities
>
info
=
getMediaCodecInfo
(
MimeTypes
.
VIDEO_H264
);
Pair
<
String
,
CodecCapabilities
>
info
=
getMediaCodecInfo
(
MimeTypes
.
VIDEO_H264
,
false
);
if
(
info
==
null
)
{
return
0
;
}
...
...
@@ -177,4 +197,123 @@ public class MediaCodecUtil {
}
}
/** Abstraction over the framework codec list, implemented per SDK level. */
private interface MediaCodecListCompat {

  /**
   * The number of codecs in the list.
   */
  public int getCodecCount();

  /**
   * The info at the specified index in the list.
   *
   * @param index The index.
   */
  public MediaCodecInfo getCodecInfoAt(int index);

  /**
   * Returns whether secure decoders are explicitly listed, if present.
   */
  public boolean secureDecodersExplicit();

  /**
   * Whether secure playback is supported for the given {@link CodecCapabilities}, which should
   * have been obtained from a {@link MediaCodecInfo} obtained from this list.
   * <p>
   * May only be called if {@link #secureDecodersExplicit()} returns true.
   */
  public boolean isSecurePlaybackSupported(CodecCapabilities capabilities);

}
/**
 * {@link MediaCodecListCompat} backed by the object-based {@link MediaCodecList} API added in
 * SDK 21, which lists secure decoders explicitly.
 */
@TargetApi(21)
private static final class MediaCodecListCompatV21 implements MediaCodecListCompat {

  private final MediaCodecInfo[] mediaCodecInfos;

  public MediaCodecListCompatV21(boolean includeSecure) {
    int codecKind;
    if (includeSecure) {
      codecKind = MediaCodecList.ALL_CODECS;
    } else {
      codecKind = MediaCodecList.REGULAR_CODECS;
    }
    // Snapshot the codec list once; it is immutable for the lifetime of this instance.
    mediaCodecInfos = new MediaCodecList(codecKind).getCodecInfos();
  }

  @Override
  public int getCodecCount() {
    return mediaCodecInfos.length;
  }

  @Override
  public MediaCodecInfo getCodecInfoAt(int index) {
    return mediaCodecInfos[index];
  }

  @Override
  public boolean secureDecodersExplicit() {
    return true;
  }

  @Override
  public boolean isSecurePlaybackSupported(CodecCapabilities capabilities) {
    return capabilities.isFeatureSupported(CodecCapabilities.FEATURE_SecurePlayback);
  }

}
/**
 * {@link MediaCodecListCompat} backed by the deprecated static {@link MediaCodecList} API
 * available from SDK 16, which does not list secure decoders explicitly.
 */
@SuppressWarnings("deprecation")
private static final class MediaCodecListCompatV16 implements MediaCodecListCompat {

  @Override
  public int getCodecCount() {
    return MediaCodecList.getCodecCount();
  }

  @Override
  public MediaCodecInfo getCodecInfoAt(int index) {
    return MediaCodecList.getCodecInfoAt(index);
  }

  @Override
  public boolean secureDecodersExplicit() {
    // The pre-21 list never contains distinct entries for secure decoders.
    return false;
  }

  @Override
  public boolean isSecurePlaybackSupported(CodecCapabilities capabilities) {
    // Disallowed by the interface contract when secureDecodersExplicit() returns false.
    throw new UnsupportedOperationException();
  }

}
/** A (mimeType, secure) pair used as a key into the codec cache. */
private static final class CodecKey {

  public final String mimeType;
  public final boolean secure;

  public CodecKey(String mimeType, boolean secure) {
    this.mimeType = mimeType;
    this.secure = secure;
  }

  @Override
  public int hashCode() {
    // Algebraically identical to the classic prime-accumulator form:
    // 31 * (31 * 1 + mimeHash) + secureHash.
    int result = 31 + (mimeType == null ? 0 : mimeType.hashCode());
    return 31 * result + (secure ? 1231 : 1237);
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    // Exact-class check; null falls through to false.
    if (obj == null || obj.getClass() != CodecKey.class) {
      return false;
    }
    CodecKey other = (CodecKey) obj;
    // TextUtils.equals is null-safe, matching the null-tolerant hashCode above.
    return TextUtils.equals(mimeType, other.mimeType) && secure == other.secure;
  }

}
}
library/src/main/java/com/google/android/exoplayer/audio/AudioCapabilities.java
0 → 100644
View file @
87d0be25
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package
com
.
google
.
android
.
exoplayer
.
audio
;
import
com.google.android.exoplayer.util.Util
;
import
android.annotation.TargetApi
;
import
android.media.AudioFormat
;
import
java.util.HashSet
;
import
java.util.Set
;
/**
 * Represents the set of audio formats a device is capable of playing back.
 */
@TargetApi(21)
public final class AudioCapabilities {

  // Encodings ({@link AudioFormat} ENCODING_* constants) that the device can output.
  private final Set<Integer> supportedEncodings;
  // Maximum number of audio channels the device can render simultaneously.
  private final int maxChannelCount;

  /**
   * Constructs new audio capabilities based on a set of supported encodings and a maximum channel
   * count.
   *
   * @param supportedEncodings Supported audio encodings from {@link android.media.AudioFormat}'s
   *     {@code ENCODING_*} constants. May be null, which is treated as an empty set.
   * @param maxChannelCount The maximum number of audio channels that can be played simultaneously.
   */
  public AudioCapabilities(int[] supportedEncodings, int maxChannelCount) {
    Set<Integer> encodings = new HashSet<Integer>();
    if (supportedEncodings != null) {
      for (int encoding : supportedEncodings) {
        encodings.add(encoding);
      }
    }
    this.supportedEncodings = encodings;
    this.maxChannelCount = maxChannelCount;
  }

  /** Returns whether the device supports playback of AC-3. */
  public boolean supportsAc3() {
    if (Util.SDK_INT < 21) {
      // ENCODING_AC3 was only introduced in API 21.
      return false;
    }
    return supportedEncodings.contains(AudioFormat.ENCODING_AC3);
  }

  /** Returns whether the device supports playback of enhanced AC-3. */
  public boolean supportsEAc3() {
    if (Util.SDK_INT < 21) {
      // ENCODING_E_AC3 was only introduced in API 21.
      return false;
    }
    return supportedEncodings.contains(AudioFormat.ENCODING_E_AC3);
  }

  /** Returns whether the device supports playback of 16-bit PCM. */
  public boolean supportsPcm() {
    return supportedEncodings.contains(AudioFormat.ENCODING_PCM_16BIT);
  }

  /** Returns the maximum number of channels the device can play at the same time. */
  public int getMaxChannelCount() {
    return maxChannelCount;
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    }
    if (!(other instanceof AudioCapabilities)) {
      return false;
    }
    AudioCapabilities that = (AudioCapabilities) other;
    return maxChannelCount == that.maxChannelCount
        && supportedEncodings.equals(that.supportedEncodings);
  }

  @Override
  public int hashCode() {
    return maxChannelCount + 31 * supportedEncodings.hashCode();
  }

  @Override
  public String toString() {
    return "AudioCapabilities[maxChannelCount=" + maxChannelCount
        + ", supportedEncodings=" + supportedEncodings + "]";
  }

}
library/src/main/java/com/google/android/exoplayer/audio/AudioCapabilitiesReceiver.java
0 → 100644
View file @
87d0be25
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package
com
.
google
.
android
.
exoplayer
.
audio
;
import
com.google.android.exoplayer.util.Assertions
;
import
com.google.android.exoplayer.util.Util
;
import
android.annotation.TargetApi
;
import
android.content.BroadcastReceiver
;
import
android.content.Context
;
import
android.content.Intent
;
import
android.content.IntentFilter
;
import
android.media.AudioFormat
;
import
android.media.AudioManager
;
/**
 * Notifies a listener when the audio playback capabilities change. Call {@link #register} to start
 * receiving notifications, and {@link #unregister} to stop.
 */
public final class AudioCapabilitiesReceiver {

  /** Listener notified when audio capabilities change. */
  public interface Listener {

    /** Called when the audio capabilities change. */
    void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities);

  }

  /** Default to stereo PCM on SDK <= 21 and when HDMI is unplugged. */
  private static final AudioCapabilities DEFAULT_AUDIO_CAPABILITIES =
      new AudioCapabilities(new int[] {AudioFormat.ENCODING_PCM_16BIT}, 2);

  private final Context context;
  private final Listener listener;
  // Null below SDK 21, where ACTION_HDMI_AUDIO_PLUG based updates are unavailable and only the
  // default capabilities are ever reported.
  private final BroadcastReceiver receiver;

  /**
   * Constructs a new audio capabilities receiver.
   *
   * @param context Application context for registering to receive broadcasts.
   * @param listener Listener to notify when audio capabilities change.
   */
  public AudioCapabilitiesReceiver(Context context, Listener listener) {
    this.context = Assertions.checkNotNull(context);
    this.listener = Assertions.checkNotNull(listener);
    this.receiver = Util.SDK_INT >= 21 ? new HdmiAudioPlugBroadcastReceiver() : null;
  }

  /**
   * Registers to notify the listener when audio capabilities change. The listener will immediately
   * receive the current audio capabilities. It is important to call {@link #unregister} so that
   * the listener can be garbage collected.
   */
  @TargetApi(21)
  public void register() {
    if (receiver != null) {
      context.registerReceiver(receiver, new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG));
    }
    // The defaults are reported after registering. NOTE(review): if the (sticky) HDMI broadcast
    // were delivered synchronously before this line, this call would override it — confirm the
    // platform delivers onReceive asynchronously.
    listener.onAudioCapabilitiesChanged(DEFAULT_AUDIO_CAPABILITIES);
  }

  /** Unregisters to stop notifying the listener when audio capabilities change. */
  public void unregister() {
    if (receiver != null) {
      context.unregisterReceiver(receiver);
    }
  }

  // Translates ACTION_HDMI_AUDIO_PLUG broadcasts into listener callbacks carrying the encodings
  // and channel count extras from the intent.
  @TargetApi(21)
  private final class HdmiAudioPlugBroadcastReceiver extends BroadcastReceiver {

    @Override
    public void onReceive(Context context, Intent intent) {
      String action = intent.getAction();
      if (!action.equals(AudioManager.ACTION_HDMI_AUDIO_PLUG)) {
        // Ignore any broadcast other than the one we registered for.
        return;
      }
      listener.onAudioCapabilitiesChanged(
          new AudioCapabilities(intent.getIntArrayExtra(AudioManager.EXTRA_ENCODINGS),
              intent.getIntExtra(AudioManager.EXTRA_MAX_CHANNEL_COUNT, 0)));
    }

  }

}
library/src/main/java/com/google/android/exoplayer/audio/AudioTrack.java
0 → 100644
View file @
87d0be25
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package
com
.
google
.
android
.
exoplayer
.
audio
;
import
com.google.android.exoplayer.ExoPlaybackException
;
import
com.google.android.exoplayer.util.Assertions
;
import
com.google.android.exoplayer.util.Util
;
import
android.annotation.TargetApi
;
import
android.media.AudioFormat
;
import
android.media.AudioManager
;
import
android.media.AudioTimestamp
;
import
android.media.MediaFormat
;
import
android.os.ConditionVariable
;
import
android.util.Log
;
import
java.lang.reflect.Method
;
import
java.nio.ByteBuffer
;
/**
* Plays audio data. The implementation delegates to an {@link android.media.AudioTrack} and handles
* playback position smoothing, non-blocking writes and reconfiguration.
*
* <p>If {@link #isInitialized} returns {@code false}, the instance can be {@link #initialize}d.
* After initialization, start playback by calling {@link #play}.
*
* <p>Call {@link #handleBuffer} to write data for playback.
*
* <p>Call {@link #handleDiscontinuity} when a buffer is skipped.
*
* <p>Call {@link #reconfigure} when the output format changes.
*
* <p>Call {@link #reset} to free resources. It is safe to re-{@link #initialize} the instance.
*/
@TargetApi
(
16
)
public
final
class
AudioTrack
{
/**
 * Thrown when a failure occurs instantiating an {@link android.media.AudioTrack}.
 */
public static class InitializationException extends Exception {

  /** The state as reported by {@link android.media.AudioTrack#getState()}. */
  public final int audioTrackState;

  /**
   * @param audioTrackState The track state observed after construction; included in the message.
   * @param sampleRate The requested sample rate; included in the message.
   * @param channelConfig The requested channel configuration; included in the message.
   * @param bufferSize The requested buffer size in bytes; included in the message.
   */
  public InitializationException(int audioTrackState, int sampleRate, int channelConfig,
      int bufferSize) {
    super("AudioTrack init failed: " + audioTrackState + ", Config(" + sampleRate + ", "
        + channelConfig + ", " + bufferSize + ")");
    this.audioTrackState = audioTrackState;
  }

}
/** Returned in the result of {@link #handleBuffer} if the buffer was discontinuous. */
public
static
final
int
RESULT_POSITION_DISCONTINUITY
=
1
;
/** Returned in the result of {@link #handleBuffer} if the buffer can be released. */
public
static
final
int
RESULT_BUFFER_CONSUMED
=
2
;
/** Represents an unset {@link android.media.AudioTrack} session identifier. */
public
static
final
int
SESSION_ID_NOT_SET
=
0
;
/** The default multiplication factor used when determining the size of the track's buffer. */
public
static
final
float
DEFAULT_MIN_BUFFER_MULTIPLICATION_FACTOR
=
4
;
/** Returned by {@link #getCurrentPositionUs} when the position is not set. */
public
static
final
long
CURRENT_POSITION_NOT_SET
=
Long
.
MIN_VALUE
;
private
static
final
String
TAG
=
"AudioTrack"
;
private
static
final
long
MICROS_PER_SECOND
=
1000000L
;
/**
* AudioTrack timestamps are deemed spurious if they are offset from the system clock by more
* than this amount.
*
* <p>This is a fail safe that should not be required on correctly functioning devices.
*/
private
static
final
long
MAX_AUDIO_TIMESTAMP_OFFSET_US
=
10
*
MICROS_PER_SECOND
;
/**
* AudioTrack latencies are deemed impossibly large if they are greater than this amount.
*
* <p>This is a fail safe that should not be required on correctly functioning devices.
*/
private
static
final
long
MAX_LATENCY_US
=
10
*
MICROS_PER_SECOND
;
private
static
final
int
START_NOT_SET
=
0
;
private
static
final
int
START_IN_SYNC
=
1
;
private
static
final
int
START_NEED_SYNC
=
2
;
private
static
final
int
MAX_PLAYHEAD_OFFSET_COUNT
=
10
;
private
static
final
int
MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US
=
30000
;
private
static
final
int
MIN_TIMESTAMP_SAMPLE_INTERVAL_US
=
500000
;
private
final
ConditionVariable
releasingConditionVariable
;
private
final
AudioTimestampCompat
audioTimestampCompat
;
private
final
long
[]
playheadOffsets
;
private
final
float
minBufferMultiplicationFactor
;
private
android
.
media
.
AudioTrack
audioTrack
;
private
int
sampleRate
;
private
int
channelConfig
;
private
int
encoding
;
private
int
frameSize
;
private
int
minBufferSize
;
private
int
bufferSize
;
private
int
nextPlayheadOffsetIndex
;
private
int
playheadOffsetCount
;
private
long
smoothedPlayheadOffsetUs
;
private
long
lastPlayheadSampleTimeUs
;
private
boolean
audioTimestampSet
;
private
long
lastTimestampSampleTimeUs
;
private
long
lastRawPlaybackHeadPosition
;
private
long
rawPlaybackHeadWrapCount
;
private
Method
getLatencyMethod
;
private
long
submittedBytes
;
private
int
startMediaTimeState
;
private
long
startMediaTimeUs
;
private
long
resumeSystemTimeUs
;
private
long
latencyUs
;
private
float
volume
;
private
byte
[]
temporaryBuffer
;
private
int
temporaryBufferOffset
;
private
int
temporaryBufferSize
;
/** Constructs an audio track using the default minimum buffer size multiplier. */
public AudioTrack() {
  // Delegates to the main constructor with DEFAULT_MIN_BUFFER_MULTIPLICATION_FACTOR.
  this(DEFAULT_MIN_BUFFER_MULTIPLICATION_FACTOR);
}
/**
 * Constructs an audio track using the specified minimum buffer size multiplier.
 *
 * @param minBufferMultiplicationFactor Factor (>= 1) applied when sizing the track's buffer
 *     relative to the platform minimum.
 */
public AudioTrack(float minBufferMultiplicationFactor) {
  Assertions.checkArgument(minBufferMultiplicationFactor >= 1);
  this.minBufferMultiplicationFactor = minBufferMultiplicationFactor;
  // Opened (true) so that the first initialize() does not block; reset/release presumably
  // closes it while an asynchronous release is in flight — see initialize(int).
  releasingConditionVariable = new ConditionVariable(true);
  // AudioTrack.getTimestamp only exists from SDK 19; use a no-op shim below that.
  if (Util.SDK_INT >= 19) {
    audioTimestampCompat = new AudioTimestampCompatV19();
  } else {
    audioTimestampCompat = new NoopAudioTimestampCompat();
  }
  if (Util.SDK_INT >= 18) {
    try {
      // getLatency is a hidden method, so it must be looked up reflectively.
      getLatencyMethod =
          android.media.AudioTrack.class.getMethod("getLatency", (Class<?>[]) null);
    } catch (NoSuchMethodException e) {
      // There's no guarantee this method exists. Do nothing.
    }
  }
  playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
  volume = 1.0f;
  startMediaTimeState = START_NOT_SET;
}
/**
 * Returns whether the audio track has been successfully initialized via {@link #initialize} and
 * not yet {@link #reset}.
 */
public boolean isInitialized() {
  // A non-null platform track is the sole indicator of the initialized state.
  return audioTrack != null;
}
/**
 * Returns the playback position in the stream starting at zero, in microseconds, or
 * {@link #CURRENT_POSITION_NOT_SET} if it is not yet available.
 *
 * <p>If the device supports it, the method uses the playback timestamp from
 * {@link android.media.AudioTrack#getTimestamp}. Otherwise, it derives a smoothed position by
 * sampling the {@link android.media.AudioTrack}'s frame position.
 *
 * @param sourceEnded Specify {@code true} if no more input buffers will be provided.
 * @return The playback position relative to the start of playback, in microseconds.
 */
public long getCurrentPositionUs(boolean sourceEnded) {
  if (!hasCurrentPositionUs()) {
    return CURRENT_POSITION_NOT_SET;
  }
  long systemClockUs = System.nanoTime() / 1000;
  long currentPositionUs;
  // Refreshes audioTimestampSet / smoothedPlayheadOffsetUs / latencyUs before they are read.
  maybeSampleSyncParams();
  if (audioTimestampSet) {
    // How long ago in the past the audio timestamp is (negative if it's in the future).
    long presentationDiff = systemClockUs - (audioTimestampCompat.getNanoTime() / 1000);
    long framesDiff = durationUsToFrames(presentationDiff);
    // The position of the frame that's currently being presented.
    long currentFramePosition = audioTimestampCompat.getFramePosition() + framesDiff;
    currentPositionUs = framesToDurationUs(currentFramePosition) + startMediaTimeUs;
  } else {
    if (playheadOffsetCount == 0) {
      // The AudioTrack has started, but we don't have any samples to compute a smoothed position.
      currentPositionUs = getPlaybackPositionUs() + startMediaTimeUs;
    } else {
      // getPlayheadPositionUs() only has a granularity of ~20ms, so we base the position off the
      // system clock (and a smoothed offset between it and the playhead position) so as to
      // prevent jitter in the reported positions.
      currentPositionUs = systemClockUs + smoothedPlayheadOffsetUs + startMediaTimeUs;
    }
    if (!sourceEnded) {
      // Subtract the track's output latency, except at end of stream where the remaining
      // buffered audio should still be counted.
      currentPositionUs -= latencyUs;
    }
  }
  return currentPositionUs;
}
/**
 * Initializes the audio track for writing new buffers using {@link #handleBuffer}.
 *
 * @return The audio track session identifier.
 * @throws InitializationException If instantiating the platform track fails.
 */
public int initialize() throws InitializationException {
  // Delegates with SESSION_ID_NOT_SET, which creates a new audio session.
  return initialize(SESSION_ID_NOT_SET);
}
/**
 * Initializes the audio track for writing new buffers using {@link #handleBuffer}.
 *
 * @param sessionId Audio track session identifier to re-use, or {@link #SESSION_ID_NOT_SET} to
 *     create a new one.
 * @return The new (or re-used) session identifier.
 * @throws InitializationException If instantiating the platform track fails.
 */
public int initialize(int sessionId) throws InitializationException {
  // If we're asynchronously releasing a previous audio track then we block until it has been
  // released. This guarantees that we cannot end up in a state where we have multiple audio
  // track instances. Without this guarantee it would be possible, in extreme cases, to exhaust
  // the shared memory that's available for audio track buffers. This would in turn cause the
  // initialization of the audio track to fail.
  releasingConditionVariable.block();
  if (sessionId == SESSION_ID_NOT_SET) {
    audioTrack = new android.media.AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        channelConfig, encoding, bufferSize, android.media.AudioTrack.MODE_STREAM);
  } else {
    // Re-attach to the same audio session.
    audioTrack = new android.media.AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        channelConfig, encoding, bufferSize, android.media.AudioTrack.MODE_STREAM, sessionId);
  }
  // Presumably throws InitializationException if the track did not initialize — defined
  // elsewhere in this class.
  checkAudioTrackInitialized();
  // Re-apply the volume that was set before (re)initialization.
  setVolume(volume);
  return audioTrack.getAudioSessionId();
}
/**
 * Reconfigures the audio track to play back media in {@code format}. The encoding is assumed to
 * be {@link AudioFormat#ENCODING_PCM_16BIT}.
 *
 * @param format Specifies the channel count and sample rate to play back.
 */
public void reconfigure(MediaFormat format) {
  // Default to 16-bit PCM and an automatically sized buffer (0 = derive from the minimum).
  reconfigure(format, AudioFormat.ENCODING_PCM_16BIT, 0);
}
/**
 * Reconfigures the audio track to play back media in {@code format}. Buffers passed to
 * {@link #handleBuffer} must use the specified {@code encoding}, which should be a constant
 * from {@link AudioFormat}.
 *
 * @param format Specifies the channel count and sample rate to play back.
 * @param encoding The format in which audio is represented.
 * @param bufferSize The total size of the playback buffer in bytes. Specify 0 to use a buffer
 *     size based on the minimum for format.
 */
public void reconfigure(MediaFormat format, int encoding, int bufferSize) {
  int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
  int channelConfig;
  // Map the raw channel count onto the framework's channel-mask constants.
  switch (channelCount) {
    case 1:
      channelConfig = AudioFormat.CHANNEL_OUT_MONO;
      break;
    case 2:
      channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
      break;
    case 6:
      channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
      break;
    case 8:
      channelConfig = AudioFormat.CHANNEL_OUT_7POINT1;
      break;
    default:
      throw new IllegalArgumentException("Unsupported channel count: " + channelCount);
  }
  int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);

  // TODO: Does channelConfig determine channelCount?
  boolean trackUnchanged = audioTrack != null && this.sampleRate == sampleRate
      && this.channelConfig == channelConfig;
  if (trackUnchanged) {
    // We already have an existing audio track with the correct sample rate and channel config.
    return;
  }

  reset();

  minBufferSize = android.media.AudioTrack.getMinBufferSize(sampleRate, channelConfig, encoding);
  this.encoding = encoding;
  if (bufferSize == 0) {
    // Derive the buffer size from the platform minimum, scaled by the configured factor.
    this.bufferSize = (int) (minBufferMultiplicationFactor * minBufferSize);
  } else {
    this.bufferSize = bufferSize;
  }
  this.sampleRate = sampleRate;
  this.channelConfig = channelConfig;
  frameSize = 2 * channelCount; // 2 bytes per 16 bit sample * number of channels.
}
/** Starts/resumes playing audio if the audio track has been initialized. */
public void play() {
  if (!isInitialized()) {
    return;
  }
  // Record the resume time so later audio timestamps that pre-date it can be rejected.
  resumeSystemTimeUs = System.nanoTime() / 1000;
  audioTrack.play();
}
/** Signals to the audio track that the next buffer is discontinuous with the previous buffer. */
public void handleDiscontinuity() {
  // Force resynchronization after a skipped buffer.
  boolean currentlyInSync = startMediaTimeState == START_IN_SYNC;
  if (currentlyInSync) {
    startMediaTimeState = START_NEED_SYNC;
  }
}
/**
 * Attempts to write {@code size} bytes from {@code buffer} at {@code offset} to the audio track.
 * Returns a bit field containing {@link #RESULT_BUFFER_CONSUMED} if the buffer can be released
 * (due to having been written), and {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was
 * discontinuous with previously written data.
 *
 * @param buffer The buffer containing audio data to play back.
 * @param offset The offset in the buffer from which to consume data.
 * @param size The number of bytes to consume from {@code buffer}.
 * @param presentationTimeUs Presentation timestamp of the next buffer in microseconds.
 * @return A bit field with {@link #RESULT_BUFFER_CONSUMED} if the buffer can be released, and
 *     {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was not contiguous with previously
 *     written data.
 */
public int handleBuffer(ByteBuffer buffer, int offset, int size, long presentationTimeUs) {
  int result = 0;
  if (temporaryBufferSize == 0 && size != 0) {
    // This is the first time we've seen this {@code buffer}.
    // Note: presentationTimeUs corresponds to the end of the sample, not the start.
    long bufferStartTime = presentationTimeUs - framesToDurationUs(bytesToFrames(size));
    if (startMediaTimeUs == START_NOT_SET) {
      // First buffer ever submitted: anchor the media time to this buffer's start (clamped to 0).
      startMediaTimeUs = Math.max(0, bufferStartTime);
      startMediaTimeState = START_IN_SYNC;
    } else {
      // Sanity check that bufferStartTime is consistent with the expected value.
      long expectedBufferStartTime = startMediaTimeUs
          + framesToDurationUs(bytesToFrames(submittedBytes));
      // A drift of more than 200ms between the expected and actual start times is treated as a
      // discontinuity that requires re-synchronization.
      if (startMediaTimeState == START_IN_SYNC
          && Math.abs(expectedBufferStartTime - bufferStartTime) > 200000) {
        Log.e(TAG, "Discontinuity detected [expected " + expectedBufferStartTime + ", got "
            + bufferStartTime + "]");
        startMediaTimeState = START_NEED_SYNC;
      }
      if (startMediaTimeState == START_NEED_SYNC) {
        // Adjust startMediaTimeUs to be consistent with the current buffer's start time and the
        // number of bytes submitted.
        startMediaTimeUs += (bufferStartTime - expectedBufferStartTime);
        startMediaTimeState = START_IN_SYNC;
        // Report the discontinuity so the caller can adjust its notion of the position.
        result = RESULT_POSITION_DISCONTINUITY;
      }
    }
  }
  if (size == 0) {
    // Nothing to write; still report any discontinuity detected above.
    return result;
  }
  if (temporaryBufferSize == 0) {
    // Start consuming a new buffer. temporaryBufferSize tracks how many bytes remain unwritten.
    temporaryBufferSize = size;
    buffer.position(offset);
    if (Util.SDK_INT < 21) {
      // Copy {@code buffer} into {@code temporaryBuffer}, since the pre-21 AudioTrack.write only
      // accepts byte arrays. The array is grown lazily and reused across calls.
      if (temporaryBuffer == null || temporaryBuffer.length < size) {
        temporaryBuffer = new byte[size];
      }
      buffer.get(temporaryBuffer, 0, size);
      temporaryBufferOffset = 0;
    }
  }
  int bytesWritten = 0;
  if (Util.SDK_INT < 21) {
    // Work out how many bytes we can write without the risk of blocking.
    int bytesPending = (int) (submittedBytes - framesToBytes(getPlaybackPositionFrames()));
    int bytesToWrite = bufferSize - bytesPending;
    if (bytesToWrite > 0) {
      bytesToWrite = Math.min(temporaryBufferSize, bytesToWrite);
      bytesWritten = audioTrack.write(temporaryBuffer, temporaryBufferOffset, bytesToWrite);
      if (bytesWritten < 0) {
        // NOTE(review): a negative error code is logged but then still added to
        // temporaryBufferSize/submittedBytes below, which would corrupt both counters — confirm
        // whether write() can actually return a negative value on this path.
        Log.w(TAG, "AudioTrack.write returned error code: " + bytesWritten);
      } else {
        temporaryBufferOffset += bytesWritten;
      }
    }
  } else {
    // On API 21+ the platform supports non-blocking writes directly from the ByteBuffer.
    bytesWritten = writeNonBlockingV21(audioTrack, buffer, temporaryBufferSize);
  }
  // Account for what was written; the buffer is consumed once nothing remains.
  temporaryBufferSize -= bytesWritten;
  submittedBytes += bytesWritten;
  if (temporaryBufferSize == 0) {
    result |= RESULT_BUFFER_CONSUMED;
  }
  return result;
}
/**
 * Writes up to {@code size} bytes from {@code buffer} to {@code audioTrack} without blocking,
 * using the ByteBuffer write overload added in API 21.
 *
 * @param audioTrack The track to write to.
 * @param buffer The source of audio data.
 * @param size The maximum number of bytes to write.
 * @return The value returned by {@link android.media.AudioTrack#write}.
 */
@TargetApi(21)
private static int writeNonBlockingV21(android.media.AudioTrack audioTrack, ByteBuffer buffer,
    int size) {
  int writeMode = android.media.AudioTrack.WRITE_NON_BLOCKING;
  return audioTrack.write(buffer, size, writeMode);
}
/** Returns whether the audio track has more data pending that will be played back. */
public boolean hasPendingData() {
  if (audioTrack == null) {
    return false;
  }
  // Data is pending while we have submitted more frames than have been played out.
  long submittedFrames = bytesToFrames(submittedBytes);
  return submittedFrames > getPlaybackPositionFrames();
}
/** Returns whether enough data has been supplied via {@link #handleBuffer} to begin playback. */
public boolean hasEnoughDataToBeginPlayback() {
  // Playback can begin once at least the platform's minimum buffer size has been submitted.
  boolean minimumFilled = submittedBytes >= minBufferSize;
  return minimumFilled;
}
/**
 * Sets the playback volume. The value is remembered and re-applied if the track is later
 * re-initialized.
 *
 * @param volume The volume to set.
 */
public void setVolume(float volume) {
  this.volume = volume;
  if (audioTrack == null) {
    // No track yet; the stored value will be applied on the next initialize().
    return;
  }
  if (Util.SDK_INT < 21) {
    setVolumeV3(audioTrack, volume);
  } else {
    setVolumeV21(audioTrack, volume);
  }
}
/** Sets {@code volume} on {@code audioTrack} using the single-argument API added in SDK 21. */
@TargetApi(21)
private static void setVolumeV21(android.media.AudioTrack audioTrack, float volume) {
  audioTrack.setVolume(volume);
}
/** Sets {@code volume} on {@code audioTrack} via the deprecated pre-21 stereo-volume API. */
@SuppressWarnings("deprecation")
private static void setVolumeV3(android.media.AudioTrack audioTrack, float volume) {
  // Apply the same gain to both channels.
  audioTrack.setStereoVolume(volume, volume);
}
/** Pauses playback. */
public void pause() {
  if (audioTrack == null) {
    return;
  }
  // Discard smoothing state so stale playhead samples aren't used after resuming.
  resetSyncParams();
  audioTrack.pause();
}
/**
 * Releases resources associated with this instance asynchronously. Calling {@link #initialize}
 * will block until the audio track has been released, so it is safe to initialize immediately
 * after resetting.
 */
public void reset() {
  if (audioTrack != null) {
    // Clear all bookkeeping so the next track starts from a clean slate.
    submittedBytes = 0;
    temporaryBufferSize = 0;
    lastRawPlaybackHeadPosition = 0;
    rawPlaybackHeadWrapCount = 0;
    startMediaTimeUs = START_NOT_SET;
    resetSyncParams();
    int playState = audioTrack.getPlayState();
    if (playState == android.media.AudioTrack.PLAYSTATE_PLAYING) {
      // Stop output before releasing the track.
      audioTrack.pause();
    }
    // AudioTrack.release can take some time, so we call it on a background thread.
    // Capture the instance in a local so the field can be nulled immediately.
    final android.media.AudioTrack toRelease = audioTrack;
    audioTrack = null;
    // Close the gate BEFORE starting the thread; initialize() blocks on it until release
    // completes, guaranteeing at most one live platform AudioTrack at a time.
    releasingConditionVariable.close();
    new Thread() {
      @Override
      public void run() {
        try {
          toRelease.release();
        } finally {
          // Always reopen the gate, even if release() throws, so initialize() can't deadlock.
          releasingConditionVariable.open();
        }
      }
    }.start();
  }
}
/** Returns whether {@link #getCurrentPositionUs} can return the current playback position. */
private boolean hasCurrentPositionUs() {
  if (!isInitialized()) {
    return false;
  }
  // A position is only meaningful once the media start time has been anchored.
  return startMediaTimeUs != START_NOT_SET;
}
/**
 * Updates the audio track latency and playback position parameters.
 *
 * <p>Periodically samples (a) the offset between the system clock and the playhead position, used
 * to smooth reported positions, and (b) the platform audio timestamp and track latency, used when
 * an accurate timestamp is available.
 */
private void maybeSampleSyncParams() {
  if (!hasCurrentPositionUs()) {
    return;
  }
  long playbackPositionUs = getPlaybackPositionUs();
  if (playbackPositionUs == 0) {
    // The AudioTrack hasn't output anything yet.
    return;
  }
  long systemClockUs = System.nanoTime() / 1000;
  if (systemClockUs - lastPlayheadSampleTimeUs >= MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US) {
    // Take a new sample and update the smoothed offset between the system clock and the playhead.
    // Samples are kept in a fixed-size circular buffer of MAX_PLAYHEAD_OFFSET_COUNT entries.
    playheadOffsets[nextPlayheadOffsetIndex] = playbackPositionUs - systemClockUs;
    nextPlayheadOffsetIndex = (nextPlayheadOffsetIndex + 1) % MAX_PLAYHEAD_OFFSET_COUNT;
    if (playheadOffsetCount < MAX_PLAYHEAD_OFFSET_COUNT) {
      playheadOffsetCount++;
    }
    lastPlayheadSampleTimeUs = systemClockUs;
    // Recompute the smoothed offset as the mean of all collected samples.
    smoothedPlayheadOffsetUs = 0;
    for (int i = 0; i < playheadOffsetCount; i++) {
      smoothedPlayheadOffsetUs += playheadOffsets[i] / playheadOffsetCount;
    }
  }
  if (systemClockUs - lastTimestampSampleTimeUs >= MIN_TIMESTAMP_SAMPLE_INTERVAL_US) {
    audioTimestampSet = audioTimestampCompat.update(audioTrack);
    if (audioTimestampSet) {
      // Perform sanity checks on the timestamp.
      long audioTimestampUs = audioTimestampCompat.getNanoTime() / 1000;
      if (audioTimestampUs < resumeSystemTimeUs) {
        // The timestamp corresponds to a time before the track was most recently resumed.
        audioTimestampSet = false;
      } else if (Math.abs(audioTimestampUs - systemClockUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
        // The timestamp time base is probably wrong.
        audioTimestampSet = false;
        Log.w(TAG, "Spurious audio timestamp: " + audioTimestampCompat.getFramePosition() + ", "
            + audioTimestampUs + ", " + systemClockUs);
      }
    }
    if (getLatencyMethod != null) {
      try {
        // Compute the audio track latency, excluding the latency due to the buffer (leaving
        // latency due to the mixer and audio hardware driver).
        // getLatencyMethod is a reflective handle to the hidden AudioTrack#getLatency(), which
        // returns milliseconds — hence the * 1000L to convert to microseconds.
        latencyUs = (Integer) getLatencyMethod.invoke(audioTrack, (Object[]) null) * 1000L
            - framesToDurationUs(bytesToFrames(bufferSize));
        // Sanity check that the latency is non-negative.
        latencyUs = Math.max(latencyUs, 0);
        // Sanity check that the latency isn't too large.
        if (latencyUs > MAX_LATENCY_US) {
          Log.w(TAG, "Ignoring impossibly large audio latency: " + latencyUs);
          latencyUs = 0;
        }
      } catch (Exception e) {
        // The method existed, but doesn't work. Don't try again.
        getLatencyMethod = null;
      }
    }
    lastTimestampSampleTimeUs = systemClockUs;
  }
}
/**
 * Checks that {@link #audioTrack} has been successfully initialized. If it has then calling this
 * method is a no-op. If it hasn't then {@link #audioTrack} is released and set to null, and an
 * exception is thrown.
 *
 * @throws InitializationException If {@link #audioTrack} has not been successfully initialized.
 */
private void checkAudioTrackInitialized() throws InitializationException {
  int state = audioTrack.getState();
  if (state == android.media.AudioTrack.STATE_INITIALIZED) {
    return;
  }
  // The track is not successfully initialized. Release and null the track.
  try {
    audioTrack.release();
  } catch (Exception e) {
    // The track has already failed to initialize, so it wouldn't be that surprising if release
    // were to fail too. Swallow the exception.
  } finally {
    audioTrack = null;
  }
  // Surface the failure with the configuration that was attempted, for diagnostics.
  throw new InitializationException(state, sampleRate, channelConfig, bufferSize);
}
/**
* {@link android.media.AudioTrack#getPlaybackHeadPosition()} returns a value intended to be
* interpreted as an unsigned 32 bit integer, which also wraps around periodically. This method
* returns the playback head position as a long that will only wrap around if the value exceeds
* {@link Long#MAX_VALUE} (which in practice will never happen).
*
* @return {@link android.media.AudioTrack#getPlaybackHeadPosition()} of {@link #audioTrack}
* expressed as a long.
*/
private
long
getPlaybackPositionFrames
()
{
long
rawPlaybackHeadPosition
=
0xFFFFFFFF
L
&
audioTrack
.
getPlaybackHeadPosition
();
if
(
lastRawPlaybackHeadPosition
>
rawPlaybackHeadPosition
)
{
// The value must have wrapped around.
rawPlaybackHeadWrapCount
++;
}
lastRawPlaybackHeadPosition
=
rawPlaybackHeadPosition
;
return
rawPlaybackHeadPosition
+
(
rawPlaybackHeadWrapCount
<<
32
);
}
/** Returns the playback head position as a duration in microseconds. */
private long getPlaybackPositionUs() {
  long frames = getPlaybackPositionFrames();
  return framesToDurationUs(frames);
}
/** Converts a frame count into a byte count using the current frame size. */
private long framesToBytes(long frameCount) {
  long byteCount = frameCount * frameSize;
  return byteCount;
}
/** Converts a byte count into a whole number of frames using the current frame size. */
private long bytesToFrames(long byteCount) {
  long frameCount = byteCount / frameSize;
  return frameCount;
}
/** Converts a frame count into a duration in microseconds at the current sample rate. */
private long framesToDurationUs(long frameCount) {
  long scaled = frameCount * MICROS_PER_SECOND;
  return scaled / sampleRate;
}
/** Converts a duration in microseconds into a frame count at the current sample rate. */
private long durationUsToFrames(long durationUs) {
  long scaled = durationUs * sampleRate;
  return scaled / MICROS_PER_SECOND;
}
/** Clears all playhead-smoothing and audio-timestamp sampling state. */
private void resetSyncParams() {
  // Playhead offset smoothing state.
  smoothedPlayheadOffsetUs = 0;
  nextPlayheadOffsetIndex = 0;
  playheadOffsetCount = 0;
  lastPlayheadSampleTimeUs = 0;
  // Audio timestamp sampling state.
  lastTimestampSampleTimeUs = 0;
  audioTimestampSet = false;
}
/**
 * Interface exposing the {@link android.media.AudioTimestamp} methods we need that were added in
 * SDK 19.
 */
private interface AudioTimestampCompat {
  /**
   * Returns true if the audioTimestamp was retrieved from the audioTrack.
   */
  boolean update(android.media.AudioTrack audioTrack);
  // The timestamp's clock time in nanoseconds. Only valid after update() has returned true.
  long getNanoTime();
  // The frame position associated with the timestamp. Only valid after update() has returned true.
  long getFramePosition();
}
/**
 * The AudioTimestampCompat implementation for SDK < 19 that does nothing or throws an exception.
 */
private static final class NoopAudioTimestampCompat implements AudioTimestampCompat {

  @Override
  public boolean update(android.media.AudioTrack audioTrack) {
    // Timestamps are unavailable before SDK 19.
    return false;
  }

  @Override
  public long getNanoTime() {
    // Should never be called if initTimestamp() returned false.
    throw new UnsupportedOperationException();
  }

  @Override
  public long getFramePosition() {
    // Should never be called if initTimestamp() returned false.
    throw new UnsupportedOperationException();
  }

}
/**
 * The AudioTimestampCompat implementation for SDK >= 19 that simply calls through to the actual
 * implementations added in SDK 19.
 */
@TargetApi(19)
private static final class AudioTimestampCompatV19 implements AudioTimestampCompat {

  // Reused across update() calls; the platform fills it in place.
  private final AudioTimestamp audioTimestamp;

  public AudioTimestampCompatV19() {
    audioTimestamp = new AudioTimestamp();
  }

  @Override
  public boolean update(android.media.AudioTrack audioTrack) {
    boolean retrieved = audioTrack.getTimestamp(audioTimestamp);
    return retrieved;
  }

  @Override
  public long getNanoTime() {
    return audioTimestamp.nanoTime;
  }

  @Override
  public long getFramePosition() {
    return audioTimestamp.framePosition;
  }

}
}
library/src/main/java/com/google/android/exoplayer/dash/DashChunkSource.java
View file @
87d0be25
...
...
@@ -87,7 +87,7 @@ public class DashChunkSource implements ChunkSource {
formats
[
i
]
=
representations
[
i
].
format
;
maxWidth
=
Math
.
max
(
formats
[
i
].
width
,
maxWidth
);
maxHeight
=
Math
.
max
(
formats
[
i
].
height
,
maxHeight
);
Extractor
extractor
=
formats
[
i
].
mimeType
.
startsWith
(
MimeTypes
.
VIDEO_WEBM
)
Extractor
extractor
=
mimeTypeIsWebm
(
formats
[
i
].
mimeType
)
?
new
WebmExtractor
()
:
new
FragmentedMp4Extractor
();
extractors
.
put
(
formats
[
i
].
id
,
extractor
);
this
.
representations
.
put
(
formats
[
i
].
id
,
representations
[
i
]);
...
...
@@ -197,6 +197,10 @@ public class DashChunkSource implements ChunkSource {
// Do nothing.
}
private
boolean
mimeTypeIsWebm
(
String
mimeType
)
{
return
mimeType
.
startsWith
(
MimeTypes
.
VIDEO_WEBM
)
||
mimeType
.
startsWith
(
MimeTypes
.
AUDIO_WEBM
);
}
private
Chunk
newInitializationChunk
(
RangedUri
initializationUri
,
RangedUri
indexUri
,
Representation
representation
,
Extractor
extractor
,
DataSource
dataSource
,
int
trigger
)
{
...
...
library/src/main/java/com/google/android/exoplayer/parser/webm/DefaultEbmlReader.java
View file @
87d0be25
...
...
@@ -16,6 +16,7 @@
package
com
.
google
.
android
.
exoplayer
.
parser
.
webm
;
import
com.google.android.exoplayer.C
;
import
com.google.android.exoplayer.ParserException
;
import
com.google.android.exoplayer.upstream.NonBlockingInputStream
;
import
com.google.android.exoplayer.util.Assertions
;
...
...
@@ -134,7 +135,7 @@ import java.util.Stack;
}
@Override
public
int
read
(
NonBlockingInputStream
inputStream
)
{
public
int
read
(
NonBlockingInputStream
inputStream
)
throws
ParserException
{
Assertions
.
checkState
(
eventHandler
!=
null
);
while
(
true
)
{
while
(!
masterElementsStack
.
isEmpty
()
...
...
library/src/main/java/com/google/android/exoplayer/parser/webm/EbmlEventHandler.java
View file @
87d0be25
...
...
@@ -15,6 +15,7 @@
*/
package
com
.
google
.
android
.
exoplayer
.
parser
.
webm
;
import
com.google.android.exoplayer.ParserException
;
import
com.google.android.exoplayer.upstream.NonBlockingInputStream
;
import
java.nio.ByteBuffer
;
...
...
@@ -46,41 +47,47 @@ import java.nio.ByteBuffer;
* @param elementOffsetBytes The byte offset where this element starts
* @param headerSizeBytes The byte length of this element's ID and size header
* @param contentsSizeBytes The byte length of this element's children
* @throws ParserException If a parsing error occurs.
*/
public
void
onMasterElementStart
(
int
id
,
long
elementOffsetBytes
,
int
headerSizeBytes
,
long
contentsSizeBytes
);
int
id
,
long
elementOffsetBytes
,
int
headerSizeBytes
,
long
contentsSizeBytes
)
throws
ParserException
;
/**
* Called when a master element has finished reading in all of its children from the
* {@link NonBlockingInputStream}.
*
* @param id The integer ID of this element
* @throws ParserException If a parsing error occurs.
*/
public
void
onMasterElementEnd
(
int
id
);
public
void
onMasterElementEnd
(
int
id
)
throws
ParserException
;
/**
* Called when an integer element is encountered in the {@link NonBlockingInputStream}.
*
* @param id The integer ID of this element
* @param value The integer value this element contains
* @throws ParserException If a parsing error occurs.
*/
public
void
onIntegerElement
(
int
id
,
long
value
);
public
void
onIntegerElement
(
int
id
,
long
value
)
throws
ParserException
;
/**
* Called when a float element is encountered in the {@link NonBlockingInputStream}.
*
* @param id The integer ID of this element
* @param value The float value this element contains
* @throws ParserException If a parsing error occurs.
*/
public
void
onFloatElement
(
int
id
,
double
value
);
public
void
onFloatElement
(
int
id
,
double
value
)
throws
ParserException
;
/**
* Called when a string element is encountered in the {@link NonBlockingInputStream}.
*
* @param id The integer ID of this element
* @param value The string value this element contains
* @throws ParserException If a parsing error occurs.
*/
public
void
onStringElement
(
int
id
,
String
value
);
public
void
onStringElement
(
int
id
,
String
value
)
throws
ParserException
;
/**
* Called when a binary element is encountered in the {@link NonBlockingInputStream}.
...
...
@@ -109,9 +116,10 @@ import java.nio.ByteBuffer;
* @param inputStream The {@link NonBlockingInputStream} from which this
* element's contents should be read
* @return True if the element was read. False otherwise.
* @throws ParserException If a parsing error occurs.
*/
public
boolean
onBinaryElement
(
int
id
,
long
elementOffsetBytes
,
int
headerSizeBytes
,
int
contentsSizeBytes
,
NonBlockingInputStream
inputStream
);
NonBlockingInputStream
inputStream
)
throws
ParserException
;
}
library/src/main/java/com/google/android/exoplayer/parser/webm/EbmlReader.java
View file @
87d0be25
...
...
@@ -15,6 +15,7 @@
*/
package
com
.
google
.
android
.
exoplayer
.
parser
.
webm
;
import
com.google.android.exoplayer.ParserException
;
import
com.google.android.exoplayer.upstream.NonBlockingInputStream
;
import
java.nio.ByteBuffer
;
...
...
@@ -53,8 +54,9 @@ import java.nio.ByteBuffer;
*
* @param inputStream The input stream from which data should be read
* @return One of the {@code RESULT_*} flags defined in this interface
* @throws ParserException If parsing fails.
*/
public
int
read
(
NonBlockingInputStream
inputStream
);
public
int
read
(
NonBlockingInputStream
inputStream
)
throws
ParserException
;
/**
* The total number of bytes consumed by the reader since first created or last {@link #reset()}.
...
...
library/src/main/java/com/google/android/exoplayer/parser/webm/WebmExtractor.java
View file @
87d0be25
...
...
@@ -16,6 +16,7 @@
package
com
.
google
.
android
.
exoplayer
.
parser
.
webm
;
import
com.google.android.exoplayer.MediaFormat
;
import
com.google.android.exoplayer.ParserException
;
import
com.google.android.exoplayer.SampleHolder
;
import
com.google.android.exoplayer.parser.Extractor
;
import
com.google.android.exoplayer.parser.SegmentIndex
;
...
...
@@ -27,6 +28,7 @@ import android.annotation.TargetApi;
import
android.media.MediaExtractor
;
import
java.nio.ByteBuffer
;
import
java.util.ArrayList
;
import
java.util.Arrays
;
import
java.util.Map
;
import
java.util.UUID
;
...
...
@@ -44,6 +46,8 @@ public final class WebmExtractor implements Extractor {
private
static
final
String
DOC_TYPE_WEBM
=
"webm"
;
private
static
final
String
CODEC_ID_VP9
=
"V_VP9"
;
private
static
final
String
CODEC_ID_VORBIS
=
"A_VORBIS"
;
private
static
final
int
VORBIS_MAX_INPUT_SIZE
=
8192
;
private
static
final
int
UNKNOWN
=
-
1
;
// Element IDs
...
...
@@ -65,9 +69,13 @@ public final class WebmExtractor implements Extractor {
private
static
final
int
ID_TRACKS
=
0x1654AE6B
;
private
static
final
int
ID_TRACK_ENTRY
=
0xAE
;
private
static
final
int
ID_CODEC_ID
=
0x86
;
private
static
final
int
ID_CODEC_PRIVATE
=
0x63A2
;
private
static
final
int
ID_VIDEO
=
0xE0
;
private
static
final
int
ID_PIXEL_WIDTH
=
0xB0
;
private
static
final
int
ID_PIXEL_HEIGHT
=
0xBA
;
private
static
final
int
ID_AUDIO
=
0xE1
;
private
static
final
int
ID_CHANNELS
=
0x9F
;
private
static
final
int
ID_SAMPLING_FREQUENCY
=
0xB5
;
private
static
final
int
ID_CUES
=
0x1C53BB6B
;
private
static
final
int
ID_CUE_POINT
=
0xBB
;
...
...
@@ -96,6 +104,10 @@ public final class WebmExtractor implements Extractor {
private
long
durationUs
=
UNKNOWN
;
private
int
pixelWidth
=
UNKNOWN
;
private
int
pixelHeight
=
UNKNOWN
;
private
int
channelCount
=
UNKNOWN
;
private
int
sampleRate
=
UNKNOWN
;
private
byte
[]
codecPrivate
;
private
boolean
seenAudioTrack
;
private
long
cuesSizeBytes
=
UNKNOWN
;
private
long
clusterTimecodeUs
=
UNKNOWN
;
private
long
simpleBlockTimecodeUs
=
UNKNOWN
;
...
...
@@ -114,7 +126,8 @@ public final class WebmExtractor implements Extractor {
}
@Override
public
int
read
(
NonBlockingInputStream
inputStream
,
SampleHolder
sampleHolder
)
{
public
int
read
(
NonBlockingInputStream
inputStream
,
SampleHolder
sampleHolder
)
throws
ParserException
{
this
.
sampleHolder
=
sampleHolder
;
this
.
readResults
=
0
;
while
((
readResults
&
READ_TERMINATING_RESULTS
)
==
0
)
{
...
...
@@ -176,6 +189,7 @@ public final class WebmExtractor implements Extractor {
case
ID_CLUSTER:
case
ID_TRACKS:
case
ID_TRACK_ENTRY:
case
ID_AUDIO:
case
ID_VIDEO:
case
ID_CUES:
case
ID_CUE_POINT:
...
...
@@ -187,6 +201,7 @@ public final class WebmExtractor implements Extractor {
case
ID_TIME_CODE:
case
ID_PIXEL_WIDTH:
case
ID_PIXEL_HEIGHT:
case
ID_CHANNELS:
case
ID_CUE_TIME:
case
ID_CUE_CLUSTER_POSITION:
return
EbmlReader
.
TYPE_UNSIGNED_INT
;
...
...
@@ -194,8 +209,10 @@ public final class WebmExtractor implements Extractor {
case
ID_CODEC_ID:
return
EbmlReader
.
TYPE_STRING
;
case
ID_SIMPLE_BLOCK:
case
ID_CODEC_PRIVATE:
return
EbmlReader
.
TYPE_BINARY
;
case
ID_DURATION:
case
ID_SAMPLING_FREQUENCY:
return
EbmlReader
.
TYPE_FLOAT
;
default
:
return
EbmlReader
.
TYPE_UNKNOWN
;
...
...
@@ -203,11 +220,12 @@ public final class WebmExtractor implements Extractor {
}
/* package */
boolean
onMasterElementStart
(
int
id
,
long
elementOffsetBytes
,
int
headerSizeBytes
,
long
contentsSizeBytes
)
{
int
id
,
long
elementOffsetBytes
,
int
headerSizeBytes
,
long
contentsSizeBytes
)
throws
ParserException
{
switch
(
id
)
{
case
ID_SEGMENT:
if
(
segmentStartOffsetBytes
!=
UNKNOWN
||
segmentEndOffsetBytes
!=
UNKNOWN
)
{
throw
new
IllegalState
Exception
(
"Multiple Segment elements not supported"
);
throw
new
Parser
Exception
(
"Multiple Segment elements not supported"
);
}
segmentStartOffsetBytes
=
elementOffsetBytes
+
headerSizeBytes
;
segmentEndOffsetBytes
=
elementOffsetBytes
+
headerSizeBytes
+
contentsSizeBytes
;
...
...
@@ -223,31 +241,41 @@ public final class WebmExtractor implements Extractor {
return
true
;
}
/* package */
boolean
onMasterElementEnd
(
int
id
)
{
/* package */
boolean
onMasterElementEnd
(
int
id
)
throws
ParserException
{
switch
(
id
)
{
case
ID_CUES:
buildCues
();
return
false
;
case
ID_VIDEO:
buildFormat
();
buildVideoFormat
();
return
true
;
case
ID_AUDIO:
seenAudioTrack
=
true
;
return
true
;
case
ID_TRACK_ENTRY:
if
(
seenAudioTrack
)
{
// Audio format has to be built here since codec private may not be available at the end
// of ID_AUDIO.
buildAudioFormat
();
}
return
true
;
default
:
return
true
;
}
}
/* package */
boolean
onIntegerElement
(
int
id
,
long
value
)
{
/* package */
boolean
onIntegerElement
(
int
id
,
long
value
)
throws
ParserException
{
switch
(
id
)
{
case
ID_EBML_READ_VERSION:
// Validate that EBMLReadVersion is supported. This extractor only supports v1.
if
(
value
!=
1
)
{
throw
new
IllegalArgument
Exception
(
"EBMLReadVersion "
+
value
+
" not supported"
);
throw
new
Parser
Exception
(
"EBMLReadVersion "
+
value
+
" not supported"
);
}
break
;
case
ID_DOC_TYPE_READ_VERSION:
// Validate that DocTypeReadVersion is supported. This extractor only supports up to v2.
if
(
value
<
1
||
value
>
2
)
{
throw
new
IllegalArgument
Exception
(
"DocTypeReadVersion "
+
value
+
" not supported"
);
throw
new
Parser
Exception
(
"DocTypeReadVersion "
+
value
+
" not supported"
);
}
break
;
case
ID_TIMECODE_SCALE:
...
...
@@ -259,6 +287,9 @@ public final class WebmExtractor implements Extractor {
case
ID_PIXEL_HEIGHT:
pixelHeight
=
(
int
)
value
;
break
;
case
ID_CHANNELS:
channelCount
=
(
int
)
value
;
break
;
case
ID_CUE_TIME:
cueTimesUs
.
add
(
scaleTimecodeToUs
(
value
));
break
;
...
...
@@ -275,24 +306,31 @@ public final class WebmExtractor implements Extractor {
}
/* package */
boolean
onFloatElement
(
int
id
,
double
value
)
{
if
(
id
==
ID_DURATION
)
{
switch
(
id
)
{
case
ID_DURATION:
durationUs
=
scaleTimecodeToUs
((
long
)
value
);
break
;
case
ID_SAMPLING_FREQUENCY:
sampleRate
=
(
int
)
value
;
break
;
default
:
// pass
}
return
true
;
}
/* package */
boolean
onStringElement
(
int
id
,
String
value
)
{
/* package */
boolean
onStringElement
(
int
id
,
String
value
)
throws
ParserException
{
switch
(
id
)
{
case
ID_DOC_TYPE:
// Validate that DocType is supported. This extractor only supports "webm".
if
(!
DOC_TYPE_WEBM
.
equals
(
value
))
{
throw
new
IllegalArgument
Exception
(
"DocType "
+
value
+
" not supported"
);
throw
new
Parser
Exception
(
"DocType "
+
value
+
" not supported"
);
}
break
;
case
ID_CODEC_ID:
// Validate that CodecID is supported. This extractor only supports "V_VP9".
if
(!
CODEC_ID_VP9
.
equals
(
value
))
{
throw
new
IllegalArgument
Exception
(
"CodecID "
+
value
+
" not supported"
);
// Validate that CodecID is supported. This extractor only supports "V_VP9"
and "A_VORBIS"
.
if
(!
CODEC_ID_VP9
.
equals
(
value
)
&&
!
CODEC_ID_VORBIS
.
equals
(
value
)
)
{
throw
new
Parser
Exception
(
"CodecID "
+
value
+
" not supported"
);
}
break
;
default
:
...
...
@@ -303,8 +341,9 @@ public final class WebmExtractor implements Extractor {
/* package */
boolean
onBinaryElement
(
int
id
,
long
elementOffsetBytes
,
int
headerSizeBytes
,
int
contentsSizeBytes
,
NonBlockingInputStream
inputStream
)
{
if
(
id
==
ID_SIMPLE_BLOCK
)
{
NonBlockingInputStream
inputStream
)
throws
ParserException
{
switch
(
id
)
{
case
ID_SIMPLE_BLOCK:
// Please refer to http://www.matroska.org/technical/specs/index.html#simpleblock_structure
// for info about how data is organized in a SimpleBlock element.
...
...
@@ -344,7 +383,7 @@ public final class WebmExtractor implements Extractor {
case
LACING_FIXED:
case
LACING_XIPH:
default
:
throw
new
IllegalState
Exception
(
"Lacing mode "
+
lacing
+
" not supported"
);
throw
new
Parser
Exception
(
"Lacing mode "
+
lacing
+
" not supported"
);
}
if
(
sampleHolder
.
data
==
null
||
sampleHolder
.
data
.
capacity
()
<
sampleHolder
.
size
)
{
...
...
@@ -359,6 +398,13 @@ public final class WebmExtractor implements Extractor {
reader
.
readBytes
(
inputStream
,
outputData
,
sampleHolder
.
size
);
}
readResults
|=
RESULT_READ_SAMPLE
;
break
;
case
ID_CODEC_PRIVATE:
codecPrivate
=
new
byte
[
contentsSizeBytes
];
reader
.
readBytes
(
inputStream
,
codecPrivate
,
contentsSizeBytes
);
break
;
default
:
// pass
}
return
true
;
}
...
...
@@ -372,16 +418,38 @@ public final class WebmExtractor implements Extractor {
*
* <p>Replaces the previous {@link #format} only if video width/height have changed.
* {@link #format} is guaranteed to not be null after calling this method. In
* the event that it can't be built, an {@link
IllegalState
Exception} will be thrown.
* the event that it can't be built, an {@link
Parser
Exception} will be thrown.
*/
private
void
build
Format
()
{
private
void
build
VideoFormat
()
throws
ParserException
{
if
(
pixelWidth
!=
UNKNOWN
&&
pixelHeight
!=
UNKNOWN
&&
(
format
==
null
||
format
.
width
!=
pixelWidth
||
format
.
height
!=
pixelHeight
))
{
format
=
MediaFormat
.
createVideoFormat
(
MimeTypes
.
VIDEO_VP9
,
MediaFormat
.
NO_VALUE
,
pixelWidth
,
pixelHeight
,
null
);
readResults
|=
RESULT_READ_INIT
;
}
else
if
(
format
==
null
)
{
throw
new
IllegalStateException
(
"Unable to build format"
);
throw
new
ParserException
(
"Unable to build format"
);
}
}
/**
* Build an audio {@link MediaFormat} containing recently gathered Audio information, if needed.
*
* <p>Replaces the previous {@link #format} only if audio channel count/sample rate have changed.
* {@link #format} is guaranteed to not be null after calling this method.
*
* @throws ParserException If an error occurs when parsing codec's private data or if the format
* can't be built.
*/
private
void
buildAudioFormat
()
throws
ParserException
{
if
(
channelCount
!=
UNKNOWN
&&
sampleRate
!=
UNKNOWN
&&
(
format
==
null
||
format
.
channelCount
!=
channelCount
||
format
.
sampleRate
!=
sampleRate
))
{
format
=
MediaFormat
.
createAudioFormat
(
MimeTypes
.
AUDIO_VORBIS
,
VORBIS_MAX_INPUT_SIZE
,
sampleRate
,
channelCount
,
parseVorbisCodecPrivate
());
readResults
|=
RESULT_READ_INIT
;
}
else
if
(
format
==
null
)
{
throw
new
ParserException
(
"Unable to build format"
);
}
}
...
...
@@ -389,18 +457,18 @@ public final class WebmExtractor implements Extractor {
* Build a {@link SegmentIndex} containing recently gathered Cues information.
*
* <p>{@link #cues} is guaranteed to not be null after calling this method. In
* the event that it can't be built, an {@link
IllegalState
Exception} will be thrown.
* the event that it can't be built, an {@link
Parser
Exception} will be thrown.
*/
private
void
buildCues
()
{
private
void
buildCues
()
throws
ParserException
{
if
(
segmentStartOffsetBytes
==
UNKNOWN
)
{
throw
new
IllegalState
Exception
(
"Segment start/end offsets unknown"
);
throw
new
Parser
Exception
(
"Segment start/end offsets unknown"
);
}
else
if
(
durationUs
==
UNKNOWN
)
{
throw
new
IllegalState
Exception
(
"Duration unknown"
);
throw
new
Parser
Exception
(
"Duration unknown"
);
}
else
if
(
cuesSizeBytes
==
UNKNOWN
)
{
throw
new
IllegalState
Exception
(
"Cues size unknown"
);
throw
new
Parser
Exception
(
"Cues size unknown"
);
}
else
if
(
cueTimesUs
==
null
||
cueClusterPositions
==
null
||
cueTimesUs
.
size
()
==
0
||
cueTimesUs
.
size
()
!=
cueClusterPositions
.
size
())
{
throw
new
IllegalState
Exception
(
"Invalid/missing cue points"
);
throw
new
Parser
Exception
(
"Invalid/missing cue points"
);
}
int
cuePointsSize
=
cueTimesUs
.
size
();
int
[]
sizes
=
new
int
[
cuePointsSize
];
...
...
@@ -424,6 +492,58 @@ public final class WebmExtractor implements Extractor {
}
/**
 * Parses Vorbis Codec Private data and returns it as initialization data for the {@link #format}.
 * WebM Vorbis Codec Private data specification can be found
 * <a href="http://matroska.org/technical/specs/codecid/index.html">here</a>.
 *
 * <p>The data is three Xiph-laced Vorbis headers: a lacing count byte (0x02), the laced sizes of
 * the first two headers (each encoded as a run of 0xFF bytes plus a terminator byte), then the
 * three headers themselves, of which the first and third are returned.
 *
 * @return ArrayList of byte arrays containing the initialization data on success.
 * @throws ParserException If parsing codec private data fails.
 */
private ArrayList<byte[]> parseVorbisCodecPrivate() throws ParserException {
  try {
    if (codecPrivate[0] != 0x02) {
      throw new ParserException("Error parsing vorbis codec private");
    }
    int offset = 1;
    int vorbisInfoLength = 0;
    while (codecPrivate[offset] == (byte) 0xFF) {
      vorbisInfoLength += 0xFF;
      offset++;
    }
    // Mask the terminator byte: byte is signed in Java, so values in [128, 254] would otherwise
    // be added as negative numbers and corrupt the computed length.
    vorbisInfoLength += codecPrivate[offset++] & 0xFF;
    int vorbisSkipLength = 0;
    while (codecPrivate[offset] == (byte) 0xFF) {
      vorbisSkipLength += 0xFF;
      offset++;
    }
    vorbisSkipLength += codecPrivate[offset++] & 0xFF;
    // First header must be the identification header (type 0x01).
    if (codecPrivate[offset] != 0x01) {
      throw new ParserException("Error parsing vorbis codec private");
    }
    byte[] vorbisInfo = new byte[vorbisInfoLength];
    System.arraycopy(codecPrivate, offset, vorbisInfo, 0, vorbisInfoLength);
    offset += vorbisInfoLength;
    // Second header must be the comment header (type 0x03); it is skipped, not returned.
    if (codecPrivate[offset] != 0x03) {
      throw new ParserException("Error parsing vorbis codec private");
    }
    offset += vorbisSkipLength;
    // Third header must be the setup header (type 0x05); it extends to the end of the data.
    if (codecPrivate[offset] != 0x05) {
      throw new ParserException("Error parsing vorbis codec private");
    }
    byte[] vorbisBooks = new byte[codecPrivate.length - offset];
    System.arraycopy(codecPrivate, offset, vorbisBooks, 0, codecPrivate.length - offset);
    ArrayList<byte[]> initializationData = new ArrayList<byte[]>(2);
    initializationData.add(vorbisInfo);
    initializationData.add(vorbisBooks);
    return initializationData;
  } catch (ArrayIndexOutOfBoundsException e) {
    // Malformed/truncated data; surface as a parse failure rather than a runtime error.
    throw new ParserException("Error parsing vorbis codec private");
  }
}
/**
* Passes events through to {@link WebmExtractor} as
* callbacks from {@link EbmlReader} are received.
*/
...
...
@@ -436,18 +556,19 @@ public final class WebmExtractor implements Extractor {
@Override
public
void
onMasterElementStart
(
int
id
,
long
elementOffsetBytes
,
int
headerSizeBytes
,
long
contentsSizeBytes
)
{
int
id
,
long
elementOffsetBytes
,
int
headerSizeBytes
,
long
contentsSizeBytes
)
throws
ParserException
{
WebmExtractor
.
this
.
onMasterElementStart
(
id
,
elementOffsetBytes
,
headerSizeBytes
,
contentsSizeBytes
);
}
@Override
public
void
onMasterElementEnd
(
int
id
)
{
public
void
onMasterElementEnd
(
int
id
)
throws
ParserException
{
WebmExtractor
.
this
.
onMasterElementEnd
(
id
);
}
@Override
public
void
onIntegerElement
(
int
id
,
long
value
)
{
public
void
onIntegerElement
(
int
id
,
long
value
)
throws
ParserException
{
WebmExtractor
.
this
.
onIntegerElement
(
id
,
value
);
}
...
...
@@ -457,14 +578,14 @@ public final class WebmExtractor implements Extractor {
}
@Override
public
void
onStringElement
(
int
id
,
String
value
)
{
public
void
onStringElement
(
int
id
,
String
value
)
throws
ParserException
{
WebmExtractor
.
this
.
onStringElement
(
id
,
value
);
}
@Override
public
boolean
onBinaryElement
(
int
id
,
long
elementOffsetBytes
,
int
headerSizeBytes
,
int
contentsSizeBytes
,
NonBlockingInputStream
inputStream
)
{
NonBlockingInputStream
inputStream
)
throws
ParserException
{
return
WebmExtractor
.
this
.
onBinaryElement
(
id
,
elementOffsetBytes
,
headerSizeBytes
,
contentsSizeBytes
,
inputStream
);
}
...
...
library/src/main/java/com/google/android/exoplayer/util/MimeTypes.java
View file @
87d0be25
...
...
@@ -34,6 +34,8 @@ public class MimeTypes {
// Audio sample MIME types.
public static final String AUDIO_AAC = BASE_TYPE_AUDIO + "/mp4a-latm"; // AAC (LATM packaging).
public static final String AUDIO_AC3 = BASE_TYPE_AUDIO + "/ac3"; // AC-3.
public static final String AUDIO_EC3 = BASE_TYPE_AUDIO + "/eac3"; // Enhanced AC-3.
public static final String AUDIO_WEBM = BASE_TYPE_AUDIO + "/webm";
public static final String AUDIO_VORBIS = BASE_TYPE_AUDIO + "/vorbis";
// Text sample MIME types.
public static final String TEXT_VTT = BASE_TYPE_TEXT + "/vtt"; // WebVTT.
...
...
library/src/main/java/com/google/android/exoplayer/util/PriorityHandlerThread.java
0 → 100644
View file @
87d0be25
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package
com
.
google
.
android
.
exoplayer
.
util
;
import
android.os.HandlerThread
;
import
android.os.Process
;
/**
 * A {@link HandlerThread} that runs with a specified process priority.
 */
public class PriorityHandlerThread extends HandlerThread {

  private final int threadPriority;

  /**
   * @param name The name of the thread.
   * @param priority The priority level. See {@link Process#setThreadPriority(int)} for details.
   */
  public PriorityHandlerThread(String name, int priority) {
    super(name);
    this.threadPriority = priority;
  }

  @Override
  public void run() {
    // Apply the requested priority before entering the thread's main loop.
    Process.setThreadPriority(threadPriority);
    super.run();
  }

}
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment