fix: update boundary checking of BlobReadChannel when limit() is used (#1324)

Add several new integration tests exercising various boundary-handling cases.
BenWhitehead committed Mar 30, 2022
1 parent 2bacf92 commit f21f624
Showing 2 changed files with 100 additions and 30 deletions.
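
For context, here is a minimal usage sketch (not part of the change) of the scenario this fix targets: reading through a ReadChannel whose limit() extends past the end of the object. The bucket and object names and offsets are placeholders; with the new guard in place, read(...) reports end-of-stream (-1) once the limited range is exhausted.

import com.google.cloud.ReadChannel;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import java.io.IOException;
import java.nio.ByteBuffer;

public class LimitPastEndSketch {
  public static void main(String[] args) throws IOException {
    Storage storage = StorageOptions.newBuilder().build().getService();
    BlobId blobId = BlobId.of("example-bucket", "example-object"); // placeholders

    ByteBuffer buf = ByteBuffer.allocate(64 * 1024);
    try (ReadChannel reader = storage.reader(blobId)) {
      reader.seek(20);          // illustrative offset near the start
      reader.limit(1_000_000);  // limit deliberately past the object's size
      // Once the limited range is exhausted, the channel signals
      // end-of-stream with -1 rather than continuing to request bytes.
      while (reader.read(buf) != -1) {
        buf.clear();
      }
    }
  }
}
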
BlobReadChannel.java
@@ -125,6 +125,10 @@ public int read(ByteBuffer byteBuffer) throws IOException {
}
final int toRead =
Math.toIntExact(Math.min(limit - position, Math.max(byteBuffer.remaining(), chunkSize)));
if (toRead <= 0) {
endOfStream = true;
return -1;
}
try {
ResultRetryAlgorithm<?> algorithm =
retryAlgorithmManager.getForObjectsGet(storageObject, requestOptions);
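
As a worked illustration (not part of the change), the arithmetic behind the new guard, using the numbers from the new testLimit_endBeforeBegin case below, where seek(4) is followed by limit(3):

public class GuardArithmeticSketch {
  public static void main(String[] args) {
    long limit = 3;     // reader.limit(3)
    long position = 4;  // reader.seek(4)
    int chunkSize = 16 * 1024 * 1024; // _16MiB, as in the tests
    int bufferRemaining = 64;         // arbitrary size of the caller's buffer
    int toRead =
        Math.toIntExact(Math.min(limit - position, Math.max(bufferRemaining, chunkSize)));
    // toRead is -1; the new "toRead <= 0" guard sets endOfStream and
    // read(...) returns -1 instead of reading further.
    System.out.println("toRead = " + toRead);
  }
}
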
ITBlobReadChannelTest.java
@@ -18,37 +18,60 @@

import static com.google.common.truth.Truth.assertThat;

import com.google.cloud.NoCredentials;
import com.google.cloud.ReadChannel;
import com.google.cloud.WriteChannel;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Bucket;
import com.google.cloud.storage.BucketInfo;
import com.google.cloud.storage.DataGeneration;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;
import com.google.cloud.storage.conformance.retry.TestBench;
import com.google.cloud.storage.testing.RemoteStorageHelper;
import com.google.common.io.ByteStreams;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Random;
import org.junit.ClassRule;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.TestName;

public final class ITBlobReadChannelTest {

private static final int _16MiB = 16 * 1024 * 1024;
private static final int _256KiB = 256 * 1024;

@ClassRule
public static final TestBench testBench =
TestBench.newBuilder().setContainerName("blob-read-channel-test").build();

@Rule public final TestName testName = new TestName();

@Rule public final DataGeneration dataGeneration = new DataGeneration(new Random(872364872));

@Rule public final TemporaryFolder tmp = new TemporaryFolder();

private Storage storage;
private String bucketName;
private String blobName;

@Before
public void setUp() throws Exception {
storage = StorageOptions.newBuilder().build().getService();

bucketName = RemoteStorageHelper.generateBucketName();
storage.create(BucketInfo.of(bucketName));
blobName = String.format("%s/src", testName.getMethodName());
}

@After
public void tearDown() throws Exception {
RemoteStorageHelper.forceDelete(storage, bucketName);
}

@Test
public void testLimit_smallerThanOneChunk() throws IOException {
int srcContentSize = _256KiB;
@@ -58,6 +81,24 @@ public void testLimit_smallerThanOneChunk() throws IOException {
doLimitTest(srcContentSize, rangeBegin, rangeEnd, chunkSize);
}

@Test
public void testLimit_pastEndOfBlob() throws IOException {
int srcContentSize = _256KiB;
int rangeBegin = _256KiB - 20;
int rangeEnd = _256KiB + 20;
int chunkSize = _16MiB;
doLimitTest(srcContentSize, rangeBegin, rangeEnd, chunkSize);
}

@Test
public void testLimit_endBeforeBegin() throws IOException {
int srcContentSize = _256KiB;
int rangeBegin = 4;
int rangeEnd = 3;
int chunkSize = _16MiB;
doLimitTest(srcContentSize, rangeBegin, rangeEnd, chunkSize);
}

@Test
public void testLimit_largerThanOneChunk() throws IOException {
int srcContentSize = _16MiB + (_256KiB * 3);
@@ -68,40 +109,65 @@ public void testLimit_largerThanOneChunk() throws IOException {
doLimitTest(srcContentSize, rangeBegin, rangeEnd, chunkSize);
}

@Test
public void testLimit_downloadToFile() throws IOException {
BlobId blobId = BlobId.of(bucketName, blobName);
ByteBuffer content = dataGeneration.randByteBuffer(108);
try (WriteChannel writer = storage.writer(BlobInfo.newBuilder(blobId).build())) {
writer.write(content);
}

File file = tmp.newFile();
String destFileName = file.getAbsolutePath();
byte[] expectedBytes = new byte[37 - 14];
ByteBuffer duplicate = content.duplicate();
duplicate.position(14);
duplicate.limit(37);
duplicate.get(expectedBytes);

try {
try (ReadChannel from = storage.reader(blobId);
FileChannel to = FileChannel.open(Paths.get(destFileName), StandardOpenOption.WRITE)) {
from.seek(14);
from.limit(37);

ByteStreams.copy(from, to);
}

byte[] readBytes = Files.readAllBytes(Paths.get(destFileName));
assertThat(readBytes).isEqualTo(expectedBytes);
} finally {
file.delete();
}
}

private void doLimitTest(int srcContentSize, int rangeBegin, int rangeEnd, int chunkSize)
throws IOException {
Storage s =
StorageOptions.newBuilder()
.setProjectId("blob-read-channel-test")
.setHost(testBench.getBaseUri())
.setCredentials(NoCredentials.getInstance())
.build()
.getService();

String testNameMethodName = testName.getMethodName();
String bucketName = String.format("bucket-%s", testNameMethodName.toLowerCase());
String blobName = String.format("%s/src", testNameMethodName);

Bucket bucket = s.create(BucketInfo.of(bucketName));
BlobInfo src = BlobInfo.newBuilder(bucket, blobName).build();
BlobInfo src = BlobInfo.newBuilder(bucketName, blobName).build();
ByteBuffer content = dataGeneration.randByteBuffer(srcContentSize);
ByteBuffer expectedSubContent = content.duplicate();
expectedSubContent.position(rangeBegin);
expectedSubContent.limit(rangeEnd);
try (WriteChannel writer = s.writer(src)) {
ByteBuffer dup = content.duplicate();
dup.position(rangeBegin);
dup.limit(Math.min(dup.capacity(), rangeEnd));
byte[] expectedSubContent = new byte[dup.remaining()];
dup.get(expectedSubContent);

try (WriteChannel writer = storage.writer(src)) {
writer.write(content);
}

ByteBuffer actual = ByteBuffer.allocate(rangeEnd - rangeBegin);
ByteBuffer buffer = ByteBuffer.allocate(srcContentSize);

try (ReadChannel reader = s.reader(src.getBlobId())) {
try (ReadChannel reader = storage.reader(src.getBlobId())) {
reader.setChunkSize(chunkSize);
reader.seek(rangeBegin);
reader.limit(rangeEnd);
reader.read(actual);
actual.flip();
reader.read(buffer);
buffer.flip();
}

byte[] actual = new byte[buffer.limit()];
buffer.get(actual);

assertThat(actual).isEqualTo(expectedSubContent);
}
}
