Skip to content
Toggle navigation
P
Projects
G
Groups
S
Snippets
Help
SDK
/
exoplayer
This project
Loading...
Sign in
Toggle navigation
Go to a project
Project
Repository
Issues
0
Merge Requests
0
Pipelines
Wiki
Snippets
Settings
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Commit
160a88ba
authored
Jun 18, 2015
by
Oliver Woodman
Browse files
Options
_('Browse Files')
Download
Email Patches
Plain Diff
Steps toward enabling seeking in DASH live
parent
8e58a3f5
Hide whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
503 additions
and
220 deletions
demo/src/main/java/com/google/android/exoplayer/demo/player/DashRendererBuilder.java
demo/src/main/java/com/google/android/exoplayer/demo/player/DemoPlayer.java
library/src/main/java/com/google/android/exoplayer/dash/DashChunkSource.java
library/src/test/java/com/google/android/exoplayer/dash/DashChunkSourceTest.java
demo/src/main/java/com/google/android/exoplayer/demo/player/DashRendererBuilder.java
View file @
160a88ba
...
...
@@ -224,7 +224,8 @@ public class DashRendererBuilder implements RendererBuilder,
DataSource
videoDataSource
=
new
DefaultUriDataSource
(
context
,
bandwidthMeter
,
userAgent
);
ChunkSource
videoChunkSource
=
new
DashChunkSource
(
manifestFetcher
,
videoAdaptationSetIndex
,
videoRepresentationIndices
,
videoDataSource
,
new
AdaptiveEvaluator
(
bandwidthMeter
),
LIVE_EDGE_LATENCY_MS
,
elapsedRealtimeOffset
);
new
AdaptiveEvaluator
(
bandwidthMeter
),
LIVE_EDGE_LATENCY_MS
,
elapsedRealtimeOffset
,
mainHandler
,
player
);
ChunkSampleSource
videoSampleSource
=
new
ChunkSampleSource
(
videoChunkSource
,
loadControl
,
VIDEO_BUFFER_SEGMENTS
*
BUFFER_SEGMENT_SIZE
,
true
,
mainHandler
,
player
,
DemoPlayer
.
TYPE_VIDEO
);
...
...
@@ -246,7 +247,7 @@ public class DashRendererBuilder implements RendererBuilder,
format
.
audioSamplingRate
+
"Hz)"
);
audioChunkSourceList
.
add
(
new
DashChunkSource
(
manifestFetcher
,
audioAdaptationSetIndex
,
new
int
[]
{
i
},
audioDataSource
,
audioEvaluator
,
LIVE_EDGE_LATENCY_MS
,
elapsedRealtimeOffset
));
elapsedRealtimeOffset
,
mainHandler
,
player
));
codecs
.
add
(
format
.
codecs
);
}
...
...
@@ -303,7 +304,8 @@ public class DashRendererBuilder implements RendererBuilder,
Representation
representation
=
representations
.
get
(
j
);
textTrackNameList
.
add
(
representation
.
format
.
id
);
textChunkSourceList
.
add
(
new
DashChunkSource
(
manifestFetcher
,
i
,
new
int
[]
{
j
},
textDataSource
,
textEvaluator
,
LIVE_EDGE_LATENCY_MS
,
elapsedRealtimeOffset
));
textDataSource
,
textEvaluator
,
LIVE_EDGE_LATENCY_MS
,
elapsedRealtimeOffset
,
mainHandler
,
player
));
}
}
}
...
...
demo/src/main/java/com/google/android/exoplayer/demo/player/DemoPlayer.java
View file @
160a88ba
...
...
@@ -29,6 +29,7 @@ import com.google.android.exoplayer.audio.AudioTrack;
import
com.google.android.exoplayer.chunk.ChunkSampleSource
;
import
com.google.android.exoplayer.chunk.Format
;
import
com.google.android.exoplayer.chunk.MultiTrackChunkSource
;
import
com.google.android.exoplayer.dash.DashChunkSource
;
import
com.google.android.exoplayer.drm.StreamingDrmSessionManager
;
import
com.google.android.exoplayer.hls.HlsSampleSource
;
import
com.google.android.exoplayer.metadata.MetadataTrackRenderer.MetadataRenderer
;
...
...
@@ -58,7 +59,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
public
class
DemoPlayer
implements
ExoPlayer
.
Listener
,
ChunkSampleSource
.
EventListener
,
HlsSampleSource
.
EventListener
,
DefaultBandwidthMeter
.
EventListener
,
MediaCodecVideoTrackRenderer
.
EventListener
,
MediaCodecAudioTrackRenderer
.
EventListener
,
StreamingDrmSessionManager
.
EventListener
,
TextRenderer
,
StreamingDrmSessionManager
.
EventListener
,
DashChunkSource
.
EventListener
,
TextRenderer
,
MetadataRenderer
<
Map
<
String
,
Object
>>,
DebugTextViewHelper
.
Provider
{
/**
...
...
@@ -553,6 +554,13 @@ public class DemoPlayer implements ExoPlayer.Listener, ChunkSampleSource.EventLi
}
@Override
public
void
onSeekRangeChanged
(
TimeRange
seekRange
)
{
if
(
infoListener
!=
null
)
{
infoListener
.
onSeekRangeChanged
(
seekRange
);
}
}
@Override
public
void
onPlayWhenReadyCommitted
()
{
// Do nothing.
}
...
...
library/src/main/java/com/google/android/exoplayer/dash/DashChunkSource.java
View file @
160a88ba
...
...
@@ -18,6 +18,7 @@ package com.google.android.exoplayer.dash;
import
com.google.android.exoplayer.BehindLiveWindowException
;
import
com.google.android.exoplayer.C
;
import
com.google.android.exoplayer.MediaFormat
;
import
com.google.android.exoplayer.TimeRange
;
import
com.google.android.exoplayer.TrackInfo
;
import
com.google.android.exoplayer.TrackRenderer
;
import
com.google.android.exoplayer.chunk.Chunk
;
...
...
@@ -50,6 +51,8 @@ import com.google.android.exoplayer.util.ManifestFetcher;
import
com.google.android.exoplayer.util.MimeTypes
;
import
com.google.android.exoplayer.util.SystemClock
;
import
android.os.Handler
;
import
java.io.IOException
;
import
java.util.Arrays
;
import
java.util.Collections
;
...
...
@@ -64,6 +67,20 @@ import java.util.List;
public
class
DashChunkSource
implements
ChunkSource
{
/**
* Interface definition for a callback to be notified of {@link DashChunkSource} events.
*/
public
interface
EventListener
{
/**
* Invoked when the available seek range of the stream has changed.
*
* @param seekRange The range which specifies available content that can be seeked to.
*/
public
void
onSeekRangeChanged
(
TimeRange
seekRange
);
}
/**
* Thrown when an AdaptationSet is missing from the MPD.
*/
public
static
class
NoAdaptationSetException
extends
IOException
{
...
...
@@ -79,6 +96,9 @@ public class DashChunkSource implements ChunkSource {
*/
public
static
final
int
USE_ALL_TRACKS
=
-
1
;
private
final
Handler
eventHandler
;
private
final
EventListener
eventListener
;
private
final
TrackInfo
trackInfo
;
private
final
DataSource
dataSource
;
private
final
FormatEvaluator
formatEvaluator
;
...
...
@@ -101,6 +121,12 @@ public class DashChunkSource implements ChunkSource {
private
boolean
finishedCurrentManifest
;
private
DrmInitData
drmInitData
;
private
TimeRange
seekRange
;
private
long
[]
seekRangeValues
;
private
int
firstAvailableSegmentNum
;
private
int
lastAvailableSegmentNum
;
private
boolean
startAtLiveEdge
;
private
boolean
lastChunkWasInitialization
;
private
IOException
fatalError
;
...
...
@@ -142,7 +168,7 @@ public class DashChunkSource implements ChunkSource {
public
DashChunkSource
(
MediaPresentationDescription
manifest
,
int
adaptationSetIndex
,
int
[]
representationIndices
,
DataSource
dataSource
,
FormatEvaluator
formatEvaluator
)
{
this
(
null
,
manifest
,
adaptationSetIndex
,
representationIndices
,
dataSource
,
formatEvaluator
,
new
SystemClock
(),
0
,
0
);
new
SystemClock
(),
0
,
0
,
false
,
null
,
null
);
}
/**
...
...
@@ -167,19 +193,58 @@ public class DashChunkSource implements ChunkSource {
* @param elapsedRealtimeOffsetMs If known, an estimate of the instantaneous difference between
* server-side unix time and {@link SystemClock#elapsedRealtime()} in milliseconds, specified
* as the server's unix time minus the local elapsed time. It unknown, set to 0.
* @param eventHandler A handler to use when delivering events to {@code EventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
*/
public
DashChunkSource
(
ManifestFetcher
<
MediaPresentationDescription
>
manifestFetcher
,
int
adaptationSetIndex
,
int
[]
representationIndices
,
DataSource
dataSource
,
FormatEvaluator
formatEvaluator
,
long
liveEdgeLatencyMs
,
long
elapsedRealtimeOffsetMs
,
Handler
eventHandler
,
EventListener
eventListener
)
{
this
(
manifestFetcher
,
manifestFetcher
.
getManifest
(),
adaptationSetIndex
,
representationIndices
,
dataSource
,
formatEvaluator
,
new
SystemClock
(),
liveEdgeLatencyMs
*
1000
,
elapsedRealtimeOffsetMs
*
1000
,
true
,
eventHandler
,
eventListener
);
}
/**
* Constructor to use for live DVR streaming.
*
* @param manifestFetcher A fetcher for the manifest, which must have already successfully
* completed an initial load.
* @param adaptationSetIndex The index of the adaptation set that should be used.
* @param representationIndices The indices of the representations within the adaptations set
* that should be used. May be null if all representations within the adaptation set should
* be considered.
* @param dataSource A {@link DataSource} suitable for loading the media data.
* @param formatEvaluator Selects from the available formats.
* @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
* lag behind the "live edge" (i.e. the end of the most recently defined media in the
* manifest). Choosing a small value will minimize latency introduced by the player, however
* note that the value sets an upper bound on the length of media that the player can buffer.
* Hence a small value may increase the probability of rebuffering and playback failures.
* @param elapsedRealtimeOffsetMs If known, an estimate of the instantaneous difference between
* server-side unix time and {@link SystemClock#elapsedRealtime()} in milliseconds, specified
* as the server's unix time minus the local elapsed time. It unknown, set to 0.
* @param startAtLiveEdge True if the stream should start at the live edge; false if it should
* at the beginning of the live window.
* @param eventHandler A handler to use when delivering events to {@code EventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
*/
public
DashChunkSource
(
ManifestFetcher
<
MediaPresentationDescription
>
manifestFetcher
,
int
adaptationSetIndex
,
int
[]
representationIndices
,
DataSource
dataSource
,
FormatEvaluator
formatEvaluator
,
long
liveEdgeLatencyMs
,
long
elapsedRealtimeOffsetMs
)
{
FormatEvaluator
formatEvaluator
,
long
liveEdgeLatencyMs
,
long
elapsedRealtimeOffsetMs
,
boolean
startAtLiveEdge
,
Handler
eventHandler
,
EventListener
eventListener
)
{
this
(
manifestFetcher
,
manifestFetcher
.
getManifest
(),
adaptationSetIndex
,
representationIndices
,
dataSource
,
formatEvaluator
,
new
SystemClock
(),
liveEdgeLatencyMs
*
1000
,
elapsedRealtimeOffsetMs
*
1000
);
elapsedRealtimeOffsetMs
*
1000
,
startAtLiveEdge
,
eventHandler
,
eventListener
);
}
/* package */
DashChunkSource
(
ManifestFetcher
<
MediaPresentationDescription
>
manifestFetcher
,
MediaPresentationDescription
initialManifest
,
int
adaptationSetIndex
,
int
[]
representationIndices
,
DataSource
dataSource
,
FormatEvaluator
formatEvaluator
,
Clock
systemClock
,
long
liveEdgeLatencyUs
,
long
elapsedRealtimeOffsetUs
)
{
Clock
systemClock
,
long
liveEdgeLatencyUs
,
long
elapsedRealtimeOffsetUs
,
boolean
startAtLiveEdge
,
Handler
eventHandler
,
EventListener
eventListener
)
{
this
.
manifestFetcher
=
manifestFetcher
;
this
.
currentManifest
=
initialManifest
;
this
.
adaptationSetIndex
=
adaptationSetIndex
;
...
...
@@ -189,8 +254,12 @@ public class DashChunkSource implements ChunkSource {
this
.
systemClock
=
systemClock
;
this
.
liveEdgeLatencyUs
=
liveEdgeLatencyUs
;
this
.
elapsedRealtimeOffsetUs
=
elapsedRealtimeOffsetUs
;
this
.
startAtLiveEdge
=
startAtLiveEdge
;
this
.
eventHandler
=
eventHandler
;
this
.
eventListener
=
eventListener
;
this
.
evaluation
=
new
Evaluation
();
this
.
headerBuilder
=
new
StringBuilder
();
this
.
seekRangeValues
=
new
long
[
2
];
drmInitData
=
getDrmInitData
(
currentManifest
,
adaptationSetIndex
);
Representation
[]
representations
=
getFilteredRepresentations
(
currentManifest
,
...
...
@@ -229,6 +298,11 @@ public class DashChunkSource implements ChunkSource {
return
trackInfo
;
}
// VisibleForTesting
/* package */
TimeRange
getSeekRange
()
{
return
seekRange
;
}
@Override
public
void
enable
()
{
fatalError
=
null
;
...
...
@@ -236,6 +310,16 @@ public class DashChunkSource implements ChunkSource {
if
(
manifestFetcher
!=
null
)
{
manifestFetcher
.
enable
();
}
DashSegmentIndex
segmentIndex
=
representationHolders
.
get
(
formats
[
0
].
id
).
representation
.
getIndex
();
if
(
segmentIndex
==
null
)
{
seekRange
=
new
TimeRange
(
TimeRange
.
TYPE_SNAPSHOT
,
0
,
currentManifest
.
duration
*
1000
);
notifySeekRangeChanged
(
seekRange
);
}
else
{
long
nowUs
=
getNowUs
();
updateAvailableSegmentBounds
(
segmentIndex
,
nowUs
);
updateSeekRange
(
segmentIndex
,
nowUs
);
}
}
@Override
...
...
@@ -244,6 +328,7 @@ public class DashChunkSource implements ChunkSource {
if
(
manifestFetcher
!=
null
)
{
manifestFetcher
.
disable
();
}
seekRange
=
null
;
}
@Override
...
...
@@ -285,6 +370,10 @@ public class DashChunkSource implements ChunkSource {
}
currentManifest
=
newManifest
;
finishedCurrentManifest
=
false
;
long
nowUs
=
getNowUs
();
updateAvailableSegmentBounds
(
newRepresentations
[
0
].
getIndex
(),
nowUs
);
updateSeekRange
(
newRepresentations
[
0
].
getIndex
(),
nowUs
);
}
// TODO: This is a temporary hack to avoid constantly refreshing the MPD in cases where
...
...
@@ -354,36 +443,41 @@ public class DashChunkSource implements ChunkSource {
return
;
}
long
nowUs
;
if
(
elapsedRealtimeOffsetUs
!=
0
)
{
nowUs
=
(
systemClock
.
elapsedRealtime
()
*
1000
)
+
elapsedRealtimeOffsetUs
;
}
else
{
nowUs
=
System
.
currentTimeMillis
()
*
1000
;
}
int
firstAvailableSegmentNum
=
segmentIndex
.
getFirstSegmentNum
();
int
lastAvailableSegmentNum
=
segmentIndex
.
getLastSegmentNum
();
boolean
indexUnbounded
=
lastAvailableSegmentNum
==
DashSegmentIndex
.
INDEX_UNBOUNDED
;
int
segmentNum
;
boolean
indexUnbounded
=
segmentIndex
.
getLastSegmentNum
()
==
DashSegmentIndex
.
INDEX_UNBOUNDED
;
if
(
indexUnbounded
)
{
// The index is itself unbounded. We need to use the current time to calculate the range of
// available segments.
long
liveEdgeTimestampUs
=
nowUs
-
currentManifest
.
availabilityStartTime
*
1000
;
if
(
currentManifest
.
timeShiftBufferDepth
!=
-
1
)
{
long
bufferDepthUs
=
currentManifest
.
timeShiftBufferDepth
*
1000
;
firstAvailableSegmentNum
=
Math
.
max
(
firstAvailableSegmentNum
,
segmentIndex
.
getSegmentNum
(
liveEdgeTimestampUs
-
bufferDepthUs
));
// Manifests with unbounded indexes aren't updated regularly, so we need to update the
// segment bounds before use to ensure that they are accurate to the current time; also if
// the bounds have changed, we should update the seek range
long
nowUs
=
getNowUs
();
int
oldFirstAvailableSegmentNum
=
firstAvailableSegmentNum
;
int
oldLastAvailableSegmentNum
=
lastAvailableSegmentNum
;
updateAvailableSegmentBounds
(
segmentIndex
,
nowUs
);
if
(
oldFirstAvailableSegmentNum
!=
firstAvailableSegmentNum
||
oldLastAvailableSegmentNum
!=
lastAvailableSegmentNum
)
{
updateSeekRange
(
segmentIndex
,
nowUs
);
}
// getSegmentNum(liveEdgeTimestampUs) will not be completed yet, so subtract one to get the
// index of the last completed segment.
lastAvailableSegmentNum
=
segmentIndex
.
getSegmentNum
(
liveEdgeTimestampUs
)
-
1
;
}
int
segmentNum
;
if
(
queue
.
isEmpty
())
{
if
(
currentManifest
.
dynamic
)
{
seekPositionUs
=
getLiveSeekPosition
(
nowUs
,
indexUnbounded
,
segmentIndex
.
isExplicit
());
seekRangeValues
=
seekRange
.
getCurrentBoundsUs
(
seekRangeValues
);
if
(
startAtLiveEdge
)
{
// We want live streams to start at the live edge instead of the beginning of the
// manifest
startAtLiveEdge
=
false
;
seekPositionUs
=
seekRangeValues
[
1
];
}
else
{
seekPositionUs
=
Math
.
max
(
seekPositionUs
,
seekRangeValues
[
0
]);
seekPositionUs
=
Math
.
min
(
seekPositionUs
,
seekRangeValues
[
1
]);
}
}
segmentNum
=
segmentIndex
.
getSegmentNum
(
seekPositionUs
);
// if the index is unbounded then the result of getSegmentNum isn't clamped to ensure that
// it doesn't exceed the last available segment. Clamp it here.
if
(
indexUnbounded
)
{
segmentNum
=
Math
.
min
(
segmentNum
,
lastAvailableSegmentNum
);
}
}
else
{
MediaChunk
previous
=
queue
.
get
(
out
.
queueSize
-
1
);
segmentNum
=
previous
.
isLastChunk
?
-
1
...
...
@@ -452,6 +546,59 @@ public class DashChunkSource implements ChunkSource {
// Do nothing.
}
private
void
updateAvailableSegmentBounds
(
DashSegmentIndex
segmentIndex
,
long
nowUs
)
{
int
indexFirstAvailableSegmentNum
=
segmentIndex
.
getFirstSegmentNum
();
int
indexLastAvailableSegmentNum
=
segmentIndex
.
getLastSegmentNum
();
if
(
indexLastAvailableSegmentNum
==
DashSegmentIndex
.
INDEX_UNBOUNDED
)
{
// The index is itself unbounded. We need to use the current time to calculate the range of
// available segments.
long
liveEdgeTimestampUs
=
nowUs
-
currentManifest
.
availabilityStartTime
*
1000
;
if
(
currentManifest
.
timeShiftBufferDepth
!=
-
1
)
{
long
bufferDepthUs
=
currentManifest
.
timeShiftBufferDepth
*
1000
;
indexFirstAvailableSegmentNum
=
Math
.
max
(
indexFirstAvailableSegmentNum
,
segmentIndex
.
getSegmentNum
(
liveEdgeTimestampUs
-
bufferDepthUs
));
}
// getSegmentNum(liveEdgeTimestampUs) will not be completed yet, so subtract one to get the
// index of the last completed segment.
indexLastAvailableSegmentNum
=
segmentIndex
.
getSegmentNum
(
liveEdgeTimestampUs
)
-
1
;
}
firstAvailableSegmentNum
=
indexFirstAvailableSegmentNum
;
lastAvailableSegmentNum
=
indexLastAvailableSegmentNum
;
}
private
void
updateSeekRange
(
DashSegmentIndex
segmentIndex
,
long
nowUs
)
{
long
earliestSeekPosition
=
segmentIndex
.
getTimeUs
(
firstAvailableSegmentNum
);
long
latestSeekPosition
=
segmentIndex
.
getTimeUs
(
lastAvailableSegmentNum
)
+
segmentIndex
.
getDurationUs
(
lastAvailableSegmentNum
);
if
(
currentManifest
.
dynamic
)
{
long
liveEdgeTimestampUs
;
if
(
segmentIndex
.
getLastSegmentNum
()
==
DashSegmentIndex
.
INDEX_UNBOUNDED
)
{
liveEdgeTimestampUs
=
nowUs
-
currentManifest
.
availabilityStartTime
*
1000
;
}
else
{
liveEdgeTimestampUs
=
segmentIndex
.
getTimeUs
(
segmentIndex
.
getLastSegmentNum
())
+
segmentIndex
.
getDurationUs
(
segmentIndex
.
getLastSegmentNum
());
if
(!
segmentIndex
.
isExplicit
())
{
// Some segments defined by the index may not be available yet. Bound the calculated live
// edge based on the elapsed time since the manifest became available.
liveEdgeTimestampUs
=
Math
.
min
(
liveEdgeTimestampUs
,
nowUs
-
currentManifest
.
availabilityStartTime
*
1000
);
}
}
// it's possible that the live edge latency actually puts our latest position before
// the earliest position in the case of a DVR-like stream that's just starting up, so
// in that case just return the earliest position instead
latestSeekPosition
=
Math
.
max
(
earliestSeekPosition
,
liveEdgeTimestampUs
-
liveEdgeLatencyUs
);
}
TimeRange
newSeekRange
=
new
TimeRange
(
TimeRange
.
TYPE_SNAPSHOT
,
earliestSeekPosition
,
latestSeekPosition
);
if
(
seekRange
==
null
||
!
seekRange
.
equals
(
newSeekRange
))
{
seekRange
=
newSeekRange
;
notifySeekRangeChanged
(
seekRange
);
}
}
private
static
boolean
mimeTypeIsWebm
(
String
mimeType
)
{
return
mimeType
.
startsWith
(
MimeTypes
.
VIDEO_WEBM
)
||
mimeType
.
startsWith
(
MimeTypes
.
AUDIO_WEBM
);
}
...
...
@@ -512,36 +659,12 @@ public class DashChunkSource implements ChunkSource {
}
}
/**
* For live playbacks, determines the seek position that snaps playback to be
* {@link #liveEdgeLatencyUs} behind the live edge of the current manifest
*
* @param nowUs An estimate of the current server time, in microseconds.
* @param indexUnbounded True if the segment index for this source is unbounded. False otherwise.
* @param indexExplicit True if the segment index is explicit. False otherwise.
* @return The seek position in microseconds.
*/
private
long
getLiveSeekPosition
(
long
nowUs
,
boolean
indexUnbounded
,
boolean
indexExplicit
)
{
long
liveEdgeTimestampUs
;
if
(
indexUnbounded
)
{
liveEdgeTimestampUs
=
nowUs
-
currentManifest
.
availabilityStartTime
*
1000
;
private
long
getNowUs
()
{
if
(
elapsedRealtimeOffsetUs
!=
0
)
{
return
(
systemClock
.
elapsedRealtime
()
*
1000
)
+
elapsedRealtimeOffsetUs
;
}
else
{
liveEdgeTimestampUs
=
Long
.
MIN_VALUE
;
for
(
RepresentationHolder
representationHolder
:
representationHolders
.
values
())
{
DashSegmentIndex
segmentIndex
=
representationHolder
.
segmentIndex
;
int
lastSegmentNum
=
segmentIndex
.
getLastSegmentNum
();
long
indexLiveEdgeTimestampUs
=
segmentIndex
.
getTimeUs
(
lastSegmentNum
)
+
segmentIndex
.
getDurationUs
(
lastSegmentNum
);
liveEdgeTimestampUs
=
Math
.
max
(
liveEdgeTimestampUs
,
indexLiveEdgeTimestampUs
);
}
if
(!
indexExplicit
)
{
// Some segments defined by the index may not be available yet. Bound the calculated live
// edge based on the elapsed time since the manifest became available.
liveEdgeTimestampUs
=
Math
.
min
(
liveEdgeTimestampUs
,
nowUs
-
currentManifest
.
availabilityStartTime
*
1000
);
}
return
System
.
currentTimeMillis
()
*
1000
;
}
return
liveEdgeTimestampUs
-
liveEdgeLatencyUs
;
}
private
static
Representation
[]
getFilteredRepresentations
(
MediaPresentationDescription
manifest
,
...
...
@@ -592,6 +715,17 @@ public class DashChunkSource implements ChunkSource {
Collections
.
singletonList
(
period
));
}
private
void
notifySeekRangeChanged
(
final
TimeRange
seekRange
)
{
if
(
eventHandler
!=
null
&&
eventListener
!=
null
)
{
eventHandler
.
post
(
new
Runnable
()
{
@Override
public
void
run
()
{
eventListener
.
onSeekRangeChanged
(
seekRange
);
}
});
}
}
private
static
class
RepresentationHolder
{
public
final
Representation
representation
;
...
...
library/src/test/java/com/google/android/exoplayer/dash/DashChunkSourceTest.java
View file @
160a88ba
...
...
@@ -15,9 +15,11 @@
*/
package
com
.
google
.
android
.
exoplayer
.
dash
;
import
static
org
.
mockito
.
Mockito
.
mock
;
import
static
org
.
mockito
.
Mockito
.
when
;
import
com.google.android.exoplayer.MediaFormat
;
import
com.google.android.exoplayer.TimeRange
;
import
com.google.android.exoplayer.TrackRenderer
;
import
com.google.android.exoplayer.chunk.ChunkOperationHolder
;
import
com.google.android.exoplayer.chunk.Format
;
...
...
@@ -55,12 +57,19 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
private
static
final
FormatEvaluator
EVALUATOR
=
new
FixedEvaluator
();
private
static
final
long
AVAILABILITY_START_TIME
=
0
;
private
static
final
long
AVAILABILITY_LATENCY
=
5000
;
private
static
final
long
AVAILABILITY_REALTIME_OFFSET
=
1000
;
private
static
final
long
AVAILABILITY_CURRENT_TIME
=
AVAILABILITY_START_TIME
+
AVAILABILITY_LATENCY
-
AVAILABILITY_REALTIME_OFFSET
;
private
static
final
FakeClock
AVAILABILITY_CLOCK
=
new
FakeClock
(
AVAILABILITY_CURRENT_TIME
);
private
static
final
long
VOD_DURATION_MS
=
30000
;
private
static
final
long
LIVE_SEGMENT_COUNT
=
5
;
private
static
final
long
LIVE_SEGMENT_DURATION_MS
=
1000
;
private
static
final
long
LIVE_DURATION_MS
=
LIVE_SEGMENT_COUNT
*
LIVE_SEGMENT_DURATION_MS
;
private
static
final
long
LIVE_TIMESHIFT_BUFFER_DEPTH_MS
=
LIVE_DURATION_MS
;
private
static
final
long
AVAILABILITY_START_TIME_MS
=
60000
;
private
static
final
long
AVAILABILITY_REALTIME_OFFSET_MS
=
1000
;
private
static
final
long
AVAILABILITY_CURRENT_TIME_MS
=
AVAILABILITY_START_TIME_MS
+
LIVE_TIMESHIFT_BUFFER_DEPTH_MS
-
AVAILABILITY_REALTIME_OFFSET_MS
;
private
static
final
long
LIVE_SEEK_BEYOND_EDGE_MS
=
60000
;
private
static
final
int
TALL_HEIGHT
=
200
;
private
static
final
int
WIDE_WIDTH
=
400
;
...
...
@@ -90,6 +99,19 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
assertEquals
(
TALL_HEIGHT
,
out
.
getMaxVideoHeight
());
}
public
void
testGetSeekRangeOnVod
()
{
DashChunkSource
chunkSource
=
new
DashChunkSource
(
generateVodMpd
(),
AdaptationSet
.
TYPE_VIDEO
,
null
,
null
,
mock
(
FormatEvaluator
.
class
));
chunkSource
.
enable
();
TimeRange
seekRange
=
chunkSource
.
getSeekRange
();
checkSeekRange
(
seekRange
,
0
,
VOD_DURATION_MS
*
1000
);
long
[]
seekRangeValuesMs
=
seekRange
.
getCurrentBoundsMs
(
null
);
assertEquals
(
0
,
seekRangeValuesMs
[
0
]);
assertEquals
(
VOD_DURATION_MS
,
seekRangeValuesMs
[
1
]);
}
public
void
testMaxVideoDimensionsLegacy
()
{
SingleSegmentBase
segmentBase1
=
new
SingleSegmentBase
(
"https://example.com/1.mp4"
);
Representation
representation1
=
...
...
@@ -107,221 +129,338 @@ public class DashChunkSourceTest extends InstrumentationTestCase {
assertEquals
(
TALL_HEIGHT
,
out
.
getMaxVideoHeight
());
}
public
void
testLiveEdgeNoLatencyWithTimeline
()
{
DashChunkSource
chunkSource
=
setupLiveEdgeTimelineTest
(
0L
);
List
<
MediaChunk
>
queue
=
new
ArrayList
<>();
ChunkOperationHolder
out
=
new
ChunkOperationHolder
();
chunkSource
.
getChunkOperation
(
queue
,
0
,
0
,
out
);
assertEquals
(
4000000L
,
((
MediaChunk
)
out
.
chunk
).
startTimeUs
);
assertEquals
(
5000000L
,
((
MediaChunk
)
out
.
chunk
).
endTimeUs
);
public
void
testLiveEdgeNoLatency
()
{
long
startTimeMs
=
0
;
long
liveEdgeLatencyMs
=
0
;
long
seekPositionMs
=
LIVE_SEEK_BEYOND_EDGE_MS
;
long
seekRangeStartMs
=
0
;
long
seekRangeEndMs
=
LIVE_DURATION_MS
-
liveEdgeLatencyMs
;
long
chunkStartTimeMs
=
4000
;
long
chunkEndTimeMs
=
5000
;
checkLiveTimelineConsistency
(
startTimeMs
,
liveEdgeLatencyMs
,
seekPositionMs
,
seekRangeStartMs
,
seekRangeEndMs
,
chunkStartTimeMs
,
chunkEndTimeMs
);
}
public
void
testLiveEdge500msLatencyWithTimeline
()
{
DashChunkSource
chunkSource
=
setupLiveEdgeTimelineTest
(
500L
);
List
<
MediaChunk
>
queue
=
new
ArrayList
<>();
ChunkOperationHolder
out
=
new
ChunkOperationHolder
();
chunkSource
.
getChunkOperation
(
queue
,
0
,
0
,
out
);
assertEquals
(
4000000L
,
((
MediaChunk
)
out
.
chunk
).
startTimeUs
);
assertEquals
(
5000000L
,
((
MediaChunk
)
out
.
chunk
).
endTimeUs
);
public
void
testLiveEdgeAlmostNoLatency
()
{
long
startTimeMs
=
0
;
long
liveEdgeLatencyMs
=
1
;
long
seekPositionMs
=
LIVE_SEEK_BEYOND_EDGE_MS
;
long
seekRangeStartMs
=
0
;
long
seekRangeEndMs
=
LIVE_DURATION_MS
-
liveEdgeLatencyMs
;
long
chunkStartTimeMs
=
4000
;
long
chunkEndTimeMs
=
5000
;
checkLiveTimelineConsistency
(
startTimeMs
,
liveEdgeLatencyMs
,
seekPositionMs
,
seekRangeStartMs
,
seekRangeEndMs
,
chunkStartTimeMs
,
chunkEndTimeMs
);
}
public
void
testLiveEdge1000msLatencyWithTimeline
()
{
DashChunkSource
chunkSource
=
setupLiveEdgeTimelineTest
(
1000L
);
List
<
MediaChunk
>
queue
=
new
ArrayList
<>();
ChunkOperationHolder
out
=
new
ChunkOperationHolder
();
chunkSource
.
getChunkOperation
(
queue
,
0
,
0
,
out
);
assertEquals
(
4000000L
,
((
MediaChunk
)
out
.
chunk
).
startTimeUs
);
assertEquals
(
5000000L
,
((
MediaChunk
)
out
.
chunk
).
endTimeUs
);
public
void
testLiveEdge500msLatency
()
{
long
startTimeMs
=
0
;
long
liveEdgeLatencyMs
=
500
;
long
seekPositionMs
=
LIVE_SEEK_BEYOND_EDGE_MS
;
long
seekRangeStartMs
=
0
;
long
seekRangeEndMs
=
LIVE_DURATION_MS
-
liveEdgeLatencyMs
;
long
chunkStartTimeMs
=
4000
;
long
chunkEndTimeMs
=
5000
;
checkLiveTimelineConsistency
(
startTimeMs
,
liveEdgeLatencyMs
,
seekPositionMs
,
seekRangeStartMs
,
seekRangeEndMs
,
chunkStartTimeMs
,
chunkEndTimeMs
);
}
public
void
testLiveEdge1001msLatencyWithTimeline
()
{
DashChunkSource
chunkSource
=
setupLiveEdgeTimelineTest
(
1001L
);
List
<
MediaChunk
>
queue
=
new
ArrayList
<>();
ChunkOperationHolder
out
=
new
ChunkOperationHolder
();
chunkSource
.
getChunkOperation
(
queue
,
0
,
0
,
out
);
assertEquals
(
3000000L
,
((
MediaChunk
)
out
.
chunk
).
startTimeUs
);
assertEquals
(
4000000L
,
((
MediaChunk
)
out
.
chunk
).
endTimeUs
);
public
void
testLiveEdge1000msLatency
()
{
long
startTimeMs
=
0
;
long
liveEdgeLatencyMs
=
1000
;
long
seekPositionMs
=
LIVE_SEEK_BEYOND_EDGE_MS
;
long
seekRangeStartMs
=
0
;
long
seekRangeEndMs
=
LIVE_DURATION_MS
-
liveEdgeLatencyMs
;
long
chunkStartTimeMs
=
4000
;
long
chunkEndTimeMs
=
5000
;
checkLiveTimelineConsistency
(
startTimeMs
,
liveEdgeLatencyMs
,
seekPositionMs
,
seekRangeStartMs
,
seekRangeEndMs
,
chunkStartTimeMs
,
chunkEndTimeMs
);
}
public
void
testLiveEdge2500msLatencyWithTimeline
()
{
DashChunkSource
chunkSource
=
setupLiveEdgeTimelineTest
(
2500L
);
List
<
MediaChunk
>
queue
=
new
ArrayList
<>();
ChunkOperationHolder
out
=
new
ChunkOperationHolder
();
chunkSource
.
getChunkOperation
(
queue
,
0
,
0
,
out
);
assertEquals
(
2000000L
,
((
MediaChunk
)
out
.
chunk
).
startTimeUs
);
assertEquals
(
3000000L
,
((
MediaChunk
)
out
.
chunk
).
endTimeUs
);
public
void
testLiveEdge1001msLatency
()
{
long
startTimeMs
=
0
;
long
liveEdgeLatencyMs
=
1001
;
long
seekPositionMs
=
LIVE_SEEK_BEYOND_EDGE_MS
;
long
seekRangeStartMs
=
0
;
long
seekRangeEndMs
=
LIVE_DURATION_MS
-
liveEdgeLatencyMs
;
long
chunkStartTimeMs
=
3000
;
long
chunkEndTimeMs
=
4000
;
checkLiveTimelineConsistency
(
startTimeMs
,
liveEdgeLatencyMs
,
seekPositionMs
,
seekRangeStartMs
,
seekRangeEndMs
,
chunkStartTimeMs
,
chunkEndTimeMs
);
}
public
void
testLiveEdgeVeryHighLatencyWithTimeline
()
{
DashChunkSource
chunkSource
=
setupLiveEdgeTimelineTest
(
10000L
);
List
<
MediaChunk
>
queue
=
new
ArrayList
<>();
ChunkOperationHolder
out
=
new
ChunkOperationHolder
();
chunkSource
.
getChunkOperation
(
queue
,
0
,
0
,
out
);
assertEquals
(
0L
,
((
MediaChunk
)
out
.
chunk
).
startTimeUs
);
assertEquals
(
1000000L
,
((
MediaChunk
)
out
.
chunk
).
endTimeUs
);
public
void
testLiveEdge2500msLatency
()
{
long
startTimeMs
=
0
;
long
liveEdgeLatencyMs
=
2500
;
long
seekPositionMs
=
LIVE_SEEK_BEYOND_EDGE_MS
;
long
seekRangeStartMs
=
0
;
long
seekRangeEndMs
=
LIVE_DURATION_MS
-
liveEdgeLatencyMs
;
long
chunkStartTimeMs
=
2000
;
long
chunkEndTimeMs
=
3000
;
checkLiveTimelineConsistency
(
startTimeMs
,
liveEdgeLatencyMs
,
seekPositionMs
,
seekRangeStartMs
,
seekRangeEndMs
,
chunkStartTimeMs
,
chunkEndTimeMs
);
}
public
void
testLiveEdgeNoLatencyWithTemplate
()
{
DashChunkSource
chunkSource
=
setupLiveEdgeTemplateTest
(
0L
);
List
<
MediaChunk
>
queue
=
new
ArrayList
<>();
ChunkOperationHolder
out
=
new
ChunkOperationHolder
();
chunkSource
.
getChunkOperation
(
queue
,
0
,
0
,
out
);
// this should actually return the "5th" segment, but it currently returns the "6th", which
// doesn't actually exist yet; this will be resolved in a subsequent cl (cl/87518875).
//assertEquals(4000000L, ((MediaChunk) out.chunk).startTimeUs);
//assertEquals(5000000L, ((MediaChunk) out.chunk).endTimeUs);
/**
 * Latency (10s) that consumes the whole live duration: the seek range collapses to
 * [0, 0] and the first chunk [0, 1000)ms is chosen.
 */
public void testLiveEdgeVeryHighLatency() {
  checkLiveTimelineConsistency(0 /* startTimeMs */, 10000 /* liveEdgeLatencyMs */,
      LIVE_SEEK_BEYOND_EDGE_MS /* seekPositionMs */, 0 /* seekRangeStartMs */,
      0 /* seekRangeEndMs */, 0 /* chunkStartTimeMs */, 1000 /* chunkEndTimeMs */);
}
public
void
testLiveEdgeAlmostNoLatencyWithTemplate
()
{
DashChunkSource
chunkSource
=
setupLiveEdgeTemplateTest
(
1L
);
List
<
MediaChunk
>
queue
=
new
ArrayList
<>();
ChunkOperationHolder
out
=
new
ChunkOperationHolder
();
chunkSource
.
getChunkOperation
(
queue
,
0
,
0
,
out
);
assertEquals
(
4000000L
,
((
MediaChunk
)
out
.
chunk
).
startTimeUs
);
assertEquals
(
5000000L
,
((
MediaChunk
)
out
.
chunk
).
endTimeUs
);
/**
 * Zero latency on an in-progress stream (starting at 3000ms): the seek range spans the
 * full remaining duration and the chunk [7000, 8000)ms is chosen.
 */
public void testLiveEdgeNoLatencyInProgress() {
  checkLiveTimelineConsistency(3000 /* startTimeMs */, 0 /* liveEdgeLatencyMs */,
      LIVE_SEEK_BEYOND_EDGE_MS /* seekPositionMs */, 3000 /* seekRangeStartMs */,
      3000 + LIVE_DURATION_MS - 0 /* seekRangeEndMs */, 7000 /* chunkStartTimeMs */,
      8000 /* chunkEndTimeMs */);
}
public
void
testLiveEdge500msLatencyWithTemplate
()
{
DashChunkSource
chunkSource
=
setupLiveEdgeTemplateTest
(
500L
);
List
<
MediaChunk
>
queue
=
new
ArrayList
<>();
ChunkOperationHolder
out
=
new
ChunkOperationHolder
();
chunkSource
.
getChunkOperation
(
queue
,
0
,
0
,
out
);
assertEquals
(
4000000L
,
((
MediaChunk
)
out
.
chunk
).
startTimeUs
);
assertEquals
(
5000000L
,
((
MediaChunk
)
out
.
chunk
).
endTimeUs
);
/**
 * 1ms latency on an in-progress stream (starting at 3000ms): the seek range ends 1ms
 * early and the chunk [7000, 8000)ms is chosen.
 */
public void testLiveEdgeAlmostNoLatencyInProgress() {
  checkLiveTimelineConsistency(3000 /* startTimeMs */, 1 /* liveEdgeLatencyMs */,
      LIVE_SEEK_BEYOND_EDGE_MS /* seekPositionMs */, 3000 /* seekRangeStartMs */,
      3000 + LIVE_DURATION_MS - 1 /* seekRangeEndMs */, 7000 /* chunkStartTimeMs */,
      8000 /* chunkEndTimeMs */);
}
public
void
testLiveEdge1000msLatencyWithTemplate
()
{
DashChunkSource
chunkSource
=
setupLiveEdgeTemplateTest
(
1000L
);
List
<
MediaChunk
>
queue
=
new
ArrayList
<>();
ChunkOperationHolder
out
=
new
ChunkOperationHolder
();
chunkSource
.
getChunkOperation
(
queue
,
0
,
0
,
out
);
assertEquals
(
4000000L
,
((
MediaChunk
)
out
.
chunk
).
startTimeUs
);
assertEquals
(
5000000L
,
((
MediaChunk
)
out
.
chunk
).
endTimeUs
);
/**
 * 500ms latency on an in-progress stream (starting at 3000ms): the seek range ends
 * 500ms early and the chunk [7000, 8000)ms is chosen.
 */
public void testLiveEdge500msLatencyInProgress() {
  checkLiveTimelineConsistency(3000 /* startTimeMs */, 500 /* liveEdgeLatencyMs */,
      LIVE_SEEK_BEYOND_EDGE_MS /* seekPositionMs */, 3000 /* seekRangeStartMs */,
      3000 + LIVE_DURATION_MS - 500 /* seekRangeEndMs */, 7000 /* chunkStartTimeMs */,
      8000 /* chunkEndTimeMs */);
}
public
void
testLiveEdge1001msLatencyWithTemplate
()
{
DashChunkSource
chunkSource
=
setupLiveEdgeTemplateTest
(
1001L
);
List
<
MediaChunk
>
queue
=
new
ArrayList
<>();
ChunkOperationHolder
out
=
new
ChunkOperationHolder
();
chunkSource
.
getChunkOperation
(
queue
,
0
,
0
,
out
);
public
void
testLiveEdge1000msLatencyInProgress
()
{
long
startTimeMs
=
3000
;
long
liveEdgeLatencyMs
=
1000
;
long
seekPositionMs
=
LIVE_SEEK_BEYOND_EDGE_MS
;
long
seekRangeStartMs
=
3000
;
long
seekRangeEndMs
=
3000
+
LIVE_DURATION_MS
-
liveEdgeLatencyMs
;
long
chunkStartTimeMs
=
7000
;
long
chunkEndTimeMs
=
8000
;
checkLiveTimelineConsistency
(
startTimeMs
,
liveEdgeLatencyMs
,
seekPositionMs
,
seekRangeStartMs
,
seekRangeEndMs
,
chunkStartTimeMs
,
chunkEndTimeMs
);
}
assertEquals
(
3000000L
,
((
MediaChunk
)
out
.
chunk
).
startTimeUs
);
assertEquals
(
4000000L
,
((
MediaChunk
)
out
.
chunk
).
endTimeUs
);
/**
 * 1001ms latency on an in-progress stream (starting at 3000ms): the seek range ends
 * 1001ms early and the chunk [6000, 7000)ms is chosen.
 */
public void testLiveEdge1001msLatencyInProgress() {
  checkLiveTimelineConsistency(3000 /* startTimeMs */, 1001 /* liveEdgeLatencyMs */,
      LIVE_SEEK_BEYOND_EDGE_MS /* seekPositionMs */, 3000 /* seekRangeStartMs */,
      3000 + LIVE_DURATION_MS - 1001 /* seekRangeEndMs */, 6000 /* chunkStartTimeMs */,
      7000 /* chunkEndTimeMs */);
}
public
void
testLiveEdge2500msLatencyWithTemplate
()
{
DashChunkSource
chunkSource
=
setupLiveEdgeTemplateTest
(
2500L
);
List
<
MediaChunk
>
queue
=
new
ArrayList
<>();
ChunkOperationHolder
out
=
new
ChunkOperationHolder
();
chunkSource
.
getChunkOperation
(
queue
,
0
,
0
,
out
);
public
void
testLiveEdge2500msLatencyInProgress
()
{
long
startTimeMs
=
3000
;
long
liveEdgeLatencyMs
=
2500
;
long
seekPositionMs
=
LIVE_SEEK_BEYOND_EDGE_MS
;
long
seekRangeStartMs
=
3000
;
long
seekRangeEndMs
=
3000
+
LIVE_DURATION_MS
-
liveEdgeLatencyMs
;
long
chunkStartTimeMs
=
5000
;
long
chunkEndTimeMs
=
6000
;
checkLiveTimelineConsistency
(
startTimeMs
,
liveEdgeLatencyMs
,
seekPositionMs
,
seekRangeStartMs
,
seekRangeEndMs
,
chunkStartTimeMs
,
chunkEndTimeMs
);
}
assertEquals
(
2000000L
,
((
MediaChunk
)
out
.
chunk
).
startTimeUs
);
assertEquals
(
3000000L
,
((
MediaChunk
)
out
.
chunk
).
endTimeUs
);
/**
 * Latency (10s) that consumes the whole remaining duration of an in-progress stream:
 * the seek range collapses to [3000, 3000]ms and the first chunk [3000, 4000)ms is
 * chosen. The template/unlimited-timeshift case instead starts from zero with chunk
 * [0, 1000)ms.
 */
public void testLiveEdgeVeryHighLatencyInProgress() {
  checkLiveEdgeLatencyWithTimeline(3000 /* startTimeMs */, 0 /* periodStartMs */,
      10000 /* liveEdgeLatencyMs */, LIVE_SEEK_BEYOND_EDGE_MS /* seekPositionMs */,
      3000 /* seekRangeStartMs */, 3000 /* seekRangeEndMs */, 3000 /* chunkStartTimeMs */,
      4000 /* chunkEndTimeMs */);
  checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(3000 /* startTimeMs */,
      10000 /* liveEdgeLatencyMs */, LIVE_SEEK_BEYOND_EDGE_MS /* seekPositionMs */,
      0 /* seekRangeEndMs */, 0 /* chunkStartTimeMs */, 1000 /* chunkEndTimeMs */);
  checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(3000 /* startTimeMs */,
      10000 /* liveEdgeLatencyMs */, LIVE_SEEK_BEYOND_EDGE_MS /* seekPositionMs */,
      3000 /* seekRangeStartMs */, 3000 /* seekRangeEndMs */, 3000 /* chunkStartTimeMs */,
      4000 /* chunkEndTimeMs */);
}
p
ublic
void
testLiveEdgeVeryHighLatencyWithTemplate
()
{
DashChunkSource
chunkSource
=
setupLiveEdgeTemplateTest
(
10000L
);
List
<
MediaChunk
>
queue
=
new
ArrayList
<>(
);
ChunkOperationHolder
out
=
new
ChunkOperationHolder
(
);
chunkSource
.
getChunkOperation
(
queue
,
0
,
0
,
out
);
p
rivate
static
Representation
generateVodRepresentation
(
long
startTimeMs
,
long
duration
,
Format
format
)
{
SingleSegmentBase
segmentBase
=
new
SingleSegmentBase
(
"https://example.com/1.mp4"
);
return
Representation
.
newInstance
(
startTimeMs
,
duration
,
null
,
0
,
format
,
segmentBase
);
}
assertEquals
(
0L
,
((
MediaChunk
)
out
.
chunk
).
startTimeUs
);
assertEquals
(
1000000L
,
((
MediaChunk
)
out
.
chunk
).
endTimeUs
);
/**
 * Builds a live representation whose segment timeline consists of consecutive
 * {@code LIVE_SEGMENT_DURATION_MS}-long elements filling {@code duration}, with each
 * media segment occupying 500 bytes.
 */
private static Representation generateSegmentTimelineRepresentation(long segmentStartMs,
    long periodStartMs, long duration) {
  List<SegmentTimelineElement> segmentTimeline = new ArrayList<>();
  List<RangedUri> mediaSegments = new ArrayList<>();
  long elementStartMs = segmentStartMs;
  long mediaByteOffset = 0;
  int segmentCount = (int) (duration / LIVE_SEGMENT_DURATION_MS);
  for (int index = 0; index < segmentCount; index++) {
    segmentTimeline.add(new SegmentTimelineElement(elementStartMs, LIVE_SEGMENT_DURATION_MS));
    mediaSegments.add(new RangedUri("", "", mediaByteOffset, 500L));
    elementStartMs += LIVE_SEGMENT_DURATION_MS;
    mediaByteOffset += 500;
  }
  // The start number is derived from the absolute position of the first segment.
  int startNumber = (int) ((periodStartMs + segmentStartMs) / LIVE_SEGMENT_DURATION_MS);
  MultiSegmentBase segmentBase = new SegmentList(null, 1000, 0,
      TrackRenderer.UNKNOWN_TIME_US, startNumber, TrackRenderer.UNKNOWN_TIME_US,
      segmentTimeline, mediaSegments);
  return Representation.newInstance(periodStartMs, TrackRenderer.UNKNOWN_TIME_US, null, 0,
      REGULAR_VIDEO, segmentBase);
}
private
static
MediaPresentationDescription
generateMpd
(
boolean
live
,
List
<
Representation
>
representations
)
{
List
<
Representation
>
representations
,
boolean
limitTimeshiftBuffer
)
{
Representation
firstRepresentation
=
representations
.
get
(
0
);
AdaptationSet
adaptationSet
=
new
AdaptationSet
(
0
,
AdaptationSet
.
TYPE_UNKNOWN
,
representations
);
Period
period
=
new
Period
(
null
,
firstRepresentation
.
periodStartMs
,
firstRepresentation
.
periodDurationMs
,
Collections
.
singletonList
(
adaptationSet
));
long
duration
=
(
live
)
?
TrackRenderer
.
UNKNOWN_TIME_US
:
firstRepresentation
.
periodDurationMs
-
firstRepresentation
.
periodStartMs
;
return
new
MediaPresentationDescription
(
AVAILABILITY_START_TIME
,
duration
,
-
1
,
live
,
-
1
,
-
1
,
null
,
null
,
Collections
.
singletonList
(
period
));
return
new
MediaPresentationDescription
(
AVAILABILITY_START_TIME_MS
,
duration
,
-
1
,
live
,
-
1
,
(
limitTimeshiftBuffer
)
?
LIVE_TIMESHIFT_BUFFER_DEPTH_MS
:
-
1
,
null
,
null
,
Collections
.
singletonList
(
period
));
}
/**
 * Builds a VOD MPD containing one tall and one wide single-segment video representation.
 *
 * <p>Diff artifact fixed: the rendering had merged the old inline representation
 * construction with the new {@code generateVodRepresentation}-based construction and left
 * two return statements; the new version is reconstructed here.
 */
private static MediaPresentationDescription generateVodMpd() {
  List<Representation> representations = new ArrayList<>();
  representations.add(generateVodRepresentation(0, VOD_DURATION_MS, TALL_VIDEO));
  representations.add(generateVodRepresentation(0, VOD_DURATION_MS, WIDE_VIDEO));
  return generateMpd(false, representations, false);
}
private
static
MediaPresentationDescription
generateLiveMpdWithTimeline
()
{
List
<
Representation
>
representations
=
new
ArrayList
<>();
List
<
SegmentTimelineElement
>
segmentTimeline
=
new
ArrayList
<>();
segmentTimeline
.
add
(
new
SegmentTimelineElement
(
0L
,
1000L
));
segmentTimeline
.
add
(
new
SegmentTimelineElement
(
1000L
,
1000L
));
segmentTimeline
.
add
(
new
SegmentTimelineElement
(
2000L
,
1000L
));
segmentTimeline
.
add
(
new
SegmentTimelineElement
(
3000L
,
1000L
));
segmentTimeline
.
add
(
new
SegmentTimelineElement
(
4000L
,
1000L
));
List
<
RangedUri
>
mediaSegments
=
new
ArrayList
<>();
mediaSegments
.
add
(
new
RangedUri
(
""
,
""
,
0L
,
500L
));
mediaSegments
.
add
(
new
RangedUri
(
""
,
""
,
500L
,
500L
));
mediaSegments
.
add
(
new
RangedUri
(
""
,
""
,
1000L
,
500L
));
mediaSegments
.
add
(
new
RangedUri
(
""
,
""
,
1500L
,
500L
));
mediaSegments
.
add
(
new
RangedUri
(
""
,
""
,
2000L
,
500L
));
MultiSegmentBase
segmentBase
=
new
SegmentList
(
null
,
1000
,
0
,
TrackRenderer
.
UNKNOWN_TIME_US
,
1
,
TrackRenderer
.
UNKNOWN_TIME_US
,
segmentTimeline
,
mediaSegments
);
Representation
representation
=
Representation
.
newInstance
(
0
,
TrackRenderer
.
UNKNOWN_TIME_US
,
null
,
0
,
REGULAR_VIDEO
,
segmentBase
);
representations
.
add
(
representation
);
return
generateMpd
(
true
,
representations
);
/**
 * Builds a live MPD with a generated segment timeline representation covering
 * {@code durationMs}, starting at {@code segmentStartMs} within a period that starts at
 * {@code periodStartMs}.
 */
private static MediaPresentationDescription generateLiveMpdWithTimeline(long segmentStartMs,
    long periodStartMs, long durationMs) {
  Representation representation =
      generateSegmentTimelineRepresentation(segmentStartMs, periodStartMs, durationMs);
  return generateMpd(true, Collections.singletonList(representation), false);
}
private
static
MediaPresentationDescription
generateLiveMpdWithTemplate
()
{
private
static
MediaPresentationDescription
generateLiveMpdWithTemplate
(
boolean
limitTimeshiftBuffer
)
{
List
<
Representation
>
representations
=
new
ArrayList
<>();
UrlTemplate
initializationTemplate
=
null
;
UrlTemplate
mediaTemplate
=
UrlTemplate
.
compile
(
"$RepresentationID$/$Number$"
);
MultiSegmentBase
segmentBase
=
new
SegmentTemplate
(
null
,
1000
,
0
,
TrackRenderer
.
UNKNOWN_TIME_US
,
1
,
1000
,
null
,
TrackRenderer
.
UNKNOWN_TIME_US
,
0
,
LIVE_SEGMENT_DURATION_MS
,
null
,
initializationTemplate
,
mediaTemplate
,
"http://www.youtube.com"
);
Representation
representation
=
Representation
.
newInstance
(
0
,
TrackRenderer
.
UNKNOWN_TIME_US
,
null
,
0
,
REGULAR_VIDEO
,
segmentBase
);
representations
.
add
(
representation
);
return
generateMpd
(
true
,
representations
);
return
generateMpd
(
true
,
representations
,
limitTimeshiftBuffer
);
}
/**
 * Builds and enables a {@link DashChunkSource} for {@code mpd}, with a fake clock offset
 * from the availability time by {@code periodStartMs} and the given live-edge latency.
 */
private DashChunkSource setupDashChunkSource(MediaPresentationDescription mpd,
    long periodStartMs, long liveEdgeLatencyMs) {
  @SuppressWarnings("unchecked")
  ManifestFetcher<MediaPresentationDescription> manifestFetcher = mock(ManifestFetcher.class);
  when(manifestFetcher.getManifest()).thenReturn(mpd);
  DashChunkSource source = new DashChunkSource(manifestFetcher, mpd,
      AdaptationSet.TYPE_VIDEO, null, mockDataSource, EVALUATOR,
      new FakeClock(AVAILABILITY_CURRENT_TIME_MS + periodStartMs), liveEdgeLatencyMs * 1000,
      AVAILABILITY_REALTIME_OFFSET_MS * 1000, false, null, null);
  source.enable();
  return source;
}
/** Asserts that {@code seekRange}'s current bounds equal the expected start/end times. */
private void checkSeekRange(TimeRange seekRange, long startTimeUs, long endTimeUs) {
  long[] bounds = seekRange.getCurrentBoundsUs(null);
  assertEquals(startTimeUs, bounds[0]);
  assertEquals(endTimeUs, bounds[1]);
}
/**
 * Requests a chunk at {@code seekPositionMs} and asserts that a chunk was produced, that
 * the source's seek range matches the expected bounds, and that the chunk spans the
 * expected times (all expectations given in ms, compared in us).
 */
private void checkLiveEdgeLatency(DashChunkSource chunkSource, List<MediaChunk> queue,
    ChunkOperationHolder out, long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs,
    long chunkStartTimeMs, long chunkEndTimeMs) {
  chunkSource.getChunkOperation(queue, seekPositionMs * 1000, 0, out);
  TimeRange range = chunkSource.getSeekRange();
  assertNotNull(out.chunk);
  checkSeekRange(range, seekRangeStartMs * 1000, seekRangeEndMs * 1000);
  MediaChunk mediaChunk = (MediaChunk) out.chunk;
  assertEquals(chunkStartTimeMs * 1000, mediaChunk.startTimeUs);
  assertEquals(chunkEndTimeMs * 1000, mediaChunk.endTimeUs);
}
/**
 * Convenience overload: builds a chunk source for {@code mpd} with the given period start
 * and latency, then runs the seek-range/chunk expectations against a fresh queue/holder.
 */
private void checkLiveEdgeLatency(MediaPresentationDescription mpd, long periodStartMs,
    long liveEdgeLatencyMs, long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs,
    long chunkStartTimeMs, long chunkEndTimeMs) {
  DashChunkSource source = setupDashChunkSource(mpd, periodStartMs, liveEdgeLatencyMs);
  checkLiveEdgeLatency(source, new ArrayList<MediaChunk>(), new ChunkOperationHolder(),
      seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
/**
 * Checks live-edge latency expectations against a timeline-based manifest covering
 * {@code LIVE_DURATION_MS}.
 */
private void checkLiveEdgeLatencyWithTimeline(long segmentStartMs, long periodStartMs,
    long liveEdgeLatencyMs, long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs,
    long chunkStartTimeMs, long chunkEndTimeMs) {
  checkLiveEdgeLatency(
      generateLiveMpdWithTimeline(segmentStartMs, periodStartMs, LIVE_DURATION_MS),
      periodStartMs, liveEdgeLatencyMs, seekPositionMs, seekRangeStartMs, seekRangeEndMs,
      chunkStartTimeMs, chunkEndTimeMs);
}
/**
 * Checks live-edge latency expectations against a template-based manifest with an
 * unlimited timeshift buffer; the seek range is always expected to start at zero.
 */
private void checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(long startTimeMs,
    long liveEdgeLatencyMs, long seekPositionMs, long seekRangeEndMs, long chunkStartTimeMs,
    long chunkEndTimeMs) {
  checkLiveEdgeLatency(generateLiveMpdWithTemplate(false), startTimeMs, liveEdgeLatencyMs,
      seekPositionMs, 0, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
private
DashChunkSource
setupLiveEdgeTimelineTest
(
long
liveEdgeLatencyMs
)
{
MediaPresentationDescription
manifest
=
generateLiveMpdWithTimeline
();
when
(
mockManifestFetcher
.
getManifest
()).
thenReturn
(
manifest
);
return
new
DashChunkSource
(
mockManifestFetcher
,
manifest
,
AdaptationSet
.
TYPE_VIDEO
,
null
,
mockDataSource
,
EVALUATOR
,
AVAILABILITY_CLOCK
,
liveEdgeLatencyMs
*
1000
,
AVAILABILITY_REALTIME_OFFSET
*
1000
);
/**
 * Checks live-edge latency expectations against a template-based manifest whose
 * timeshift buffer depth is limited.
 */
private void checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(long startTimeMs,
    long liveEdgeLatencyMs, long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs,
    long chunkStartTimeMs, long chunkEndTimeMs) {
  checkLiveEdgeLatency(generateLiveMpdWithTemplate(true), startTimeMs, liveEdgeLatencyMs,
      seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
private
DashChunkSource
setupLiveEdgeTemplateTest
(
long
liveEdgeLatencyMs
)
{
MediaPresentationDescription
manifest
=
generateLiveMpdWithTemplate
();
when
(
mockManifestFetcher
.
getManifest
()).
thenReturn
(
manifest
);
return
new
DashChunkSource
(
mockManifestFetcher
,
manifest
,
AdaptationSet
.
TYPE_VIDEO
,
null
,
mockDataSource
,
EVALUATOR
,
AVAILABILITY_CLOCK
,
liveEdgeLatencyMs
*
1000
,
AVAILABILITY_REALTIME_OFFSET
*
1000
);
/**
 * Runs the same latency expectations against all three manifest flavours: explicit
 * timeline, template with unlimited timeshift, and template with limited timeshift.
 */
private void checkLiveTimelineConsistency(long startTimeMs, long liveEdgeLatencyMs,
    long seekPositionMs, long seekRangeStartMs, long seekRangeEndMs, long chunkStartTimeMs,
    long chunkEndTimeMs) {
  checkLiveEdgeLatencyWithTimeline(startTimeMs, 0 /* periodStartMs */, liveEdgeLatencyMs,
      seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  // The unlimited-timeshift variant pins the seek range start to zero itself.
  checkLiveEdgeLatencyWithTemplateAndUnlimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
      seekPositionMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
  checkLiveEdgeLatencyWithTemplateAndLimitedTimeshift(startTimeMs, liveEdgeLatencyMs,
      seekPositionMs, seekRangeStartMs, seekRangeEndMs, chunkStartTimeMs, chunkEndTimeMs);
}
}
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment