Commit 829282fa by olly Committed by Oliver Woodman

Replace DataSpec.absoluteStreamPosition with uriPositionOffset

This is a preliminary step toward adding a DataSpec.Builder,
which is needed for sanity when adding DataSpec.customData.

The existing absoluteStreamPosition field is very error-prone,
because anyone using a Builder to adjust the request position
will need to remember to adjust two values:

dataSpec
    .buildUpon()
    .setAbsoluteStreamPosition(x)
    .setPosition(x)
    .build();

Furthermore, the difference between position and
absoluteStreamPosition is irrelevant in nearly all cases. In
the core library, the difference is only relevant when initializing
AES encryption/decryption to write/read cache files.

Replacing absoluteStreamPosition with uriPositionOffset will
simplify the code block above to:

dataSpec
    .buildUpon()
    .setPosition(x)
    .build();

PiperOrigin-RevId: 294485644
parent 58bc460b
......@@ -267,7 +267,7 @@ public abstract class SegmentDownloader<M extends FilterableManifest<M>> impleme
private static boolean canMergeSegments(DataSpec dataSpec1, DataSpec dataSpec2) {
return dataSpec1.uri.equals(dataSpec2.uri)
&& dataSpec1.length != C.LENGTH_UNSET
&& (dataSpec1.absoluteStreamPosition + dataSpec1.length == dataSpec2.absoluteStreamPosition)
&& (dataSpec1.position + dataSpec1.length == dataSpec2.position)
&& Util.areEqual(dataSpec1.key, dataSpec2.key)
&& dataSpec1.flags == dataSpec2.flags
&& dataSpec1.httpMethod == dataSpec2.httpMethod
......
......@@ -126,7 +126,7 @@ public class ContainerMediaChunk extends BaseMediaChunk {
DataSpec loadDataSpec = dataSpec.subrange(nextLoadPosition);
ExtractorInput input =
new DefaultExtractorInput(
dataSource, loadDataSpec.absoluteStreamPosition, dataSource.open(loadDataSpec));
dataSource, loadDataSpec.position, dataSource.open(loadDataSpec));
// Load and decode the sample data.
try {
Extractor extractor = extractorWrapper.extractor;
......@@ -136,7 +136,7 @@ public class ContainerMediaChunk extends BaseMediaChunk {
}
Assertions.checkState(result != Extractor.RESULT_SEEK);
} finally {
nextLoadPosition = input.getPosition() - dataSpec.absoluteStreamPosition;
nextLoadPosition = input.getPosition() - dataSpec.position;
}
} finally {
Util.closeQuietly(dataSource);
......
......@@ -93,7 +93,7 @@ public final class InitializationChunk extends Chunk {
DataSpec loadDataSpec = dataSpec.subrange(nextLoadPosition);
ExtractorInput input =
new DefaultExtractorInput(
dataSource, loadDataSpec.absoluteStreamPosition, dataSource.open(loadDataSpec));
dataSource, loadDataSpec.position, dataSource.open(loadDataSpec));
// Load and decode the initialization data.
try {
Extractor extractor = extractorWrapper.extractor;
......@@ -103,7 +103,7 @@ public final class InitializationChunk extends Chunk {
}
Assertions.checkState(result != Extractor.RESULT_SEEK);
} finally {
nextLoadPosition = input.getPosition() - dataSpec.absoluteStreamPosition;
nextLoadPosition = input.getPosition() - dataSpec.position;
}
} finally {
Util.closeQuietly(dataSource);
......
......@@ -169,9 +169,7 @@ public final class CacheDataSink implements DataSink {
dataSpec.length == C.LENGTH_UNSET
? C.LENGTH_UNSET
: Math.min(dataSpec.length - dataSpecBytesWritten, dataSpecFragmentSize);
file =
cache.startFile(
dataSpec.key, dataSpec.absoluteStreamPosition + dataSpecBytesWritten, length);
file = cache.startFile(dataSpec.key, dataSpec.position + dataSpecBytesWritten, length);
FileOutputStream underlyingFileOutputStream = new FileOutputStream(file);
if (bufferSize > 0) {
if (bufferedOutputStream == null) {
......
......@@ -79,7 +79,7 @@ public final class CacheUtil {
public static Pair<Long, Long> getCached(
DataSpec dataSpec, Cache cache, @Nullable CacheKeyFactory cacheKeyFactory) {
String key = buildCacheKey(dataSpec, cacheKeyFactory);
long position = dataSpec.absoluteStreamPosition;
long position = dataSpec.position;
long requestLength = getRequestLength(dataSpec, cache, key);
long bytesAlreadyCached = 0;
long bytesLeft = requestLength;
......@@ -193,7 +193,7 @@ public final class CacheUtil {
bytesLeft = getRequestLength(dataSpec, cache, key);
}
long position = dataSpec.absoluteStreamPosition;
long position = dataSpec.position;
boolean lengthUnset = bytesLeft == C.LENGTH_UNSET;
while (bytesLeft != 0) {
throwExceptionIfInterruptedOrCancelled(isCanceled);
......@@ -238,18 +238,16 @@ public final class CacheUtil {
return dataSpec.length;
} else {
long contentLength = ContentMetadata.getContentLength(cache.getContentMetadata(key));
return contentLength == C.LENGTH_UNSET
? C.LENGTH_UNSET
: contentLength - dataSpec.absoluteStreamPosition;
return contentLength == C.LENGTH_UNSET ? C.LENGTH_UNSET : contentLength - dataSpec.position;
}
}
/**
* Reads and discards all data specified by the {@code dataSpec}.
*
* @param dataSpec Defines the data to be read. {@code absoluteStreamPosition} and {@code length}
* fields are overwritten by the following parameters.
* @param absoluteStreamPosition The absolute position of the data to be read.
* @param dataSpec Defines the data to be read. The {@code position} and {@code length} fields are
* overwritten by the following parameters.
* @param position The position of the data to be read.
* @param length Length of the data to be read, or {@link C#LENGTH_UNSET} if it is unknown.
* @param dataSource The {@link DataSource} to read the data from.
* @param buffer The buffer to be used while downloading.
......@@ -264,7 +262,7 @@ public final class CacheUtil {
*/
private static long readAndDiscard(
DataSpec dataSpec,
long absoluteStreamPosition,
long position,
long length,
DataSource dataSource,
byte[] buffer,
......@@ -274,7 +272,7 @@ public final class CacheUtil {
boolean isLastBlock,
@Nullable AtomicBoolean isCanceled)
throws IOException, InterruptedException {
long positionOffset = absoluteStreamPosition - dataSpec.absoluteStreamPosition;
long positionOffset = position - dataSpec.position;
long initialPositionOffset = positionOffset;
long endOffset = length != C.LENGTH_UNSET ? positionOffset + length : C.POSITION_UNSET;
while (true) {
......
......@@ -68,8 +68,9 @@ public final class AesCipherDataSink implements DataSink {
public void open(DataSpec dataSpec) throws IOException {
wrappedDataSink.open(dataSpec);
long nonce = CryptoUtil.getFNV64Hash(dataSpec.key);
cipher = new AesFlushingCipher(Cipher.ENCRYPT_MODE, secretKey, nonce,
dataSpec.absoluteStreamPosition);
cipher =
new AesFlushingCipher(
Cipher.ENCRYPT_MODE, secretKey, nonce, dataSpec.uriPositionOffset + dataSpec.position);
}
@Override
......
......@@ -52,8 +52,9 @@ public final class AesCipherDataSource implements DataSource {
public long open(DataSpec dataSpec) throws IOException {
long dataLength = upstream.open(dataSpec);
long nonce = CryptoUtil.getFNV64Hash(dataSpec.key);
cipher = new AesFlushingCipher(Cipher.DECRYPT_MODE, secretKey, nonce,
dataSpec.absoluteStreamPosition);
cipher =
new AesFlushingCipher(
Cipher.DECRYPT_MODE, secretKey, nonce, dataSpec.uriPositionOffset + dataSpec.position);
return dataLength;
}
......
......@@ -546,7 +546,7 @@ public final class CacheDataSourceTest {
private void assertReadData(
CacheDataSource cacheDataSource, DataSpec dataSpec, boolean unknownLength)
throws IOException {
int position = (int) dataSpec.absoluteStreamPosition;
int position = (int) dataSpec.position;
int requestLength = (int) dataSpec.length;
int readLength = TEST_DATA.length - position;
if (requestLength != C.LENGTH_UNSET) {
......
......@@ -112,7 +112,7 @@ public final class CacheDataSourceTest2 {
byte[] scratch = new byte[4096];
Random random = new Random(0);
source.open(dataSpec);
int position = (int) dataSpec.absoluteStreamPosition;
int position = (int) dataSpec.position;
int bytesRead = 0;
while (bytesRead != C.RESULT_END_OF_INPUT) {
int maxBytesToRead = random.nextInt(scratch.length) + 1;
......@@ -134,7 +134,6 @@ public final class CacheDataSourceTest2 {
DataSpec[] openedDataSpecs = upstreamSource.getAndClearOpenedDataSpecs();
assertThat(openedDataSpecs).hasLength(1);
assertThat(openedDataSpecs[0].position).isEqualTo(start);
assertThat(openedDataSpecs[0].absoluteStreamPosition).isEqualTo(start);
assertThat(openedDataSpecs[0].length).isEqualTo(end - start);
}
......
......@@ -376,7 +376,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
result = extractor.read(input, DUMMY_POSITION_HOLDER);
}
} finally {
nextLoadPosition = (int) (input.getPosition() - dataSpec.absoluteStreamPosition);
nextLoadPosition = (int) (input.getPosition() - dataSpec.position);
}
} finally {
Util.closeQuietly(dataSource);
......@@ -389,7 +389,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
throws IOException, InterruptedException {
long bytesToRead = dataSource.open(dataSpec);
DefaultExtractorInput extractorInput =
new DefaultExtractorInput(dataSource, dataSpec.absoluteStreamPosition, bytesToRead);
new DefaultExtractorInput(dataSource, dataSpec.position, bytesToRead);
if (extractor == null) {
long id3Timestamp = peekId3PrivTimestamp(extractorInput);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment