Commit 4a0f224e by eguven Committed by Oliver Woodman

Add a new flag to DataSpec which permits content to be cached even if its length can not be resolved

Add a new flag to DataSpec which permits content to be cached even if its length can not be resolved

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=143560360
parent 5082d1f2
...@@ -261,7 +261,7 @@ public class OkHttpDataSource implements HttpDataSource { ...@@ -261,7 +261,7 @@ public class OkHttpDataSource implements HttpDataSource {
private Request makeRequest(DataSpec dataSpec) { private Request makeRequest(DataSpec dataSpec) {
long position = dataSpec.position; long position = dataSpec.position;
long length = dataSpec.length; long length = dataSpec.length;
boolean allowGzip = (dataSpec.flags & DataSpec.FLAG_ALLOW_GZIP) != 0; boolean allowGzip = dataSpec.isFlagSet(DataSpec.FLAG_ALLOW_GZIP);
HttpUrl url = HttpUrl.parse(dataSpec.uri.toString()); HttpUrl url = HttpUrl.parse(dataSpec.uri.toString());
Request.Builder builder = new Request.Builder().url(url); Request.Builder builder = new Request.Builder().url(url);
......
...@@ -32,7 +32,7 @@ public final class DataSpec { ...@@ -32,7 +32,7 @@ public final class DataSpec {
* The flags that apply to any request for data. * The flags that apply to any request for data.
*/ */
@Retention(RetentionPolicy.SOURCE) @Retention(RetentionPolicy.SOURCE)
@IntDef(flag = true, value = {FLAG_ALLOW_GZIP}) @IntDef(flag = true, value = {FLAG_ALLOW_GZIP, FLAG_ALLOW_CACHING_UNKNOWN_LENGTH})
public @interface Flags {} public @interface Flags {}
/** /**
* Permits an underlying network stack to request that the server use gzip compression. * Permits an underlying network stack to request that the server use gzip compression.
...@@ -45,7 +45,10 @@ public final class DataSpec { ...@@ -45,7 +45,10 @@ public final class DataSpec {
* {@link DataSource#open(DataSpec)} will typically be {@link C#LENGTH_UNSET}. The data read from * {@link DataSource#open(DataSpec)} will typically be {@link C#LENGTH_UNSET}. The data read from
* {@link DataSource#read(byte[], int, int)} will be the decompressed data. * {@link DataSource#read(byte[], int, int)} will be the decompressed data.
*/ */
public static final int FLAG_ALLOW_GZIP = 1; public static final int FLAG_ALLOW_GZIP = 1 << 0;
/**
 * Permits content to be cached even if its length can not be resolved (i.e. when {@link #length}
 * is unset). Without this flag, a caching sink may skip content of unknown length entirely.
 */
public static final int FLAG_ALLOW_CACHING_UNKNOWN_LENGTH = 1 << 1;
/** /**
* The source from which data should be read. * The source from which data should be read.
...@@ -76,7 +79,8 @@ public final class DataSpec { ...@@ -76,7 +79,8 @@ public final class DataSpec {
*/ */
public final String key; public final String key;
/** /**
* Request flags. Currently {@link #FLAG_ALLOW_GZIP} is the only supported flag. * Request flags. Currently {@link #FLAG_ALLOW_GZIP} and
* {@link #FLAG_ALLOW_CACHING_UNKNOWN_LENGTH} are the only supported flags.
*/ */
@Flags @Flags
public final int flags; public final int flags;
...@@ -167,6 +171,15 @@ public final class DataSpec { ...@@ -167,6 +171,15 @@ public final class DataSpec {
this.flags = flags; this.flags = flags;
} }
/**
 * Returns whether the given flag is set.
 *
 * @param flag The flag to check. May be a combination of flags, in which case this method
 *     returns true only if every bit of the combination is set.
 * @return Whether all bits of {@code flag} are set in this spec's flags.
 */
public boolean isFlagSet(@Flags int flag) {
return (this.flags & flag) == flag;
}
@Override @Override
public String toString() { public String toString() {
return "DataSpec[" + uri + ", " + Arrays.toString(postBody) + ", " + absoluteStreamPosition return "DataSpec[" + uri + ", " + Arrays.toString(postBody) + ", " + absoluteStreamPosition
......
...@@ -230,7 +230,7 @@ public class DefaultHttpDataSource implements HttpDataSource { ...@@ -230,7 +230,7 @@ public class DefaultHttpDataSource implements HttpDataSource {
bytesToSkip = responseCode == 200 && dataSpec.position != 0 ? dataSpec.position : 0; bytesToSkip = responseCode == 200 && dataSpec.position != 0 ? dataSpec.position : 0;
// Determine the length of the data to be read, after skipping. // Determine the length of the data to be read, after skipping.
if ((dataSpec.flags & DataSpec.FLAG_ALLOW_GZIP) == 0) { if (!dataSpec.isFlagSet(DataSpec.FLAG_ALLOW_GZIP)) {
if (dataSpec.length != C.LENGTH_UNSET) { if (dataSpec.length != C.LENGTH_UNSET) {
bytesToRead = dataSpec.length; bytesToRead = dataSpec.length;
} else { } else {
...@@ -343,7 +343,7 @@ public class DefaultHttpDataSource implements HttpDataSource { ...@@ -343,7 +343,7 @@ public class DefaultHttpDataSource implements HttpDataSource {
byte[] postBody = dataSpec.postBody; byte[] postBody = dataSpec.postBody;
long position = dataSpec.position; long position = dataSpec.position;
long length = dataSpec.length; long length = dataSpec.length;
boolean allowGzip = (dataSpec.flags & DataSpec.FLAG_ALLOW_GZIP) != 0; boolean allowGzip = dataSpec.isFlagSet(DataSpec.FLAG_ALLOW_GZIP);
if (!allowCrossProtocolRedirects) { if (!allowCrossProtocolRedirects) {
// HttpURLConnection disallows cross-protocol redirects, but otherwise performs redirection // HttpURLConnection disallows cross-protocol redirects, but otherwise performs redirection
......
...@@ -81,10 +81,12 @@ public final class CacheDataSink implements DataSink { ...@@ -81,10 +81,12 @@ public final class CacheDataSink implements DataSink {
@Override @Override
public void open(DataSpec dataSpec) throws CacheDataSinkException { public void open(DataSpec dataSpec) throws CacheDataSinkException {
this.dataSpec = dataSpec; if (dataSpec.length == C.LENGTH_UNSET
if (dataSpec.length == C.LENGTH_UNSET) { && !dataSpec.isFlagSet(DataSpec.FLAG_ALLOW_CACHING_UNKNOWN_LENGTH)) {
this.dataSpec = null;
return; return;
} }
this.dataSpec = dataSpec;
dataSpecBytesWritten = 0; dataSpecBytesWritten = 0;
try { try {
openNextOutputStream(); openNextOutputStream();
...@@ -95,7 +97,7 @@ public final class CacheDataSink implements DataSink { ...@@ -95,7 +97,7 @@ public final class CacheDataSink implements DataSink {
@Override @Override
public void write(byte[] buffer, int offset, int length) throws CacheDataSinkException { public void write(byte[] buffer, int offset, int length) throws CacheDataSinkException {
if (dataSpec.length == C.LENGTH_UNSET) { if (dataSpec == null) {
return; return;
} }
try { try {
...@@ -119,7 +121,7 @@ public final class CacheDataSink implements DataSink { ...@@ -119,7 +121,7 @@ public final class CacheDataSink implements DataSink {
@Override @Override
public void close() throws CacheDataSinkException { public void close() throws CacheDataSinkException {
if (dataSpec == null || dataSpec.length == C.LENGTH_UNSET) { if (dataSpec == null) {
return; return;
} }
try { try {
...@@ -130,8 +132,10 @@ public final class CacheDataSink implements DataSink { ...@@ -130,8 +132,10 @@ public final class CacheDataSink implements DataSink {
} }
private void openNextOutputStream() throws IOException { private void openNextOutputStream() throws IOException {
long maxLength = dataSpec.length == C.LENGTH_UNSET ? maxCacheFileSize
: Math.min(dataSpec.length - dataSpecBytesWritten, maxCacheFileSize);
file = cache.startFile(dataSpec.key, dataSpec.absoluteStreamPosition + dataSpecBytesWritten, file = cache.startFile(dataSpec.key, dataSpec.absoluteStreamPosition + dataSpecBytesWritten,
Math.min(dataSpec.length - dataSpecBytesWritten, maxCacheFileSize)); maxLength);
underlyingFileOutputStream = new FileOutputStream(file); underlyingFileOutputStream = new FileOutputStream(file);
if (bufferSize > 0) { if (bufferSize > 0) {
if (bufferedOutputStream == null) { if (bufferedOutputStream == null) {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment