fix read() issue
kszucs committed Sep 25, 2019
1 parent c1df10b commit 4478458
Showing 2 changed files with 9 additions and 4 deletions.
cpp/src/arrow/filesystem/s3fs.cc (6 additions, 0 deletions)
@@ -340,6 +340,12 @@ class ObjectInputFile : public io::RandomAccessFile {
     RETURN_NOT_OK(CheckClosed());
     RETURN_NOT_OK(CheckPosition(position, "read"));
 
+    nbytes = std::min(nbytes, content_length_ - position);
+    if (nbytes == 0) {
+      *bytes_read = 0;
+      return Status::OK();
+    }
+
     // Read the desired range of bytes
     S3Model::GetObjectResult result;
     RETURN_NOT_OK(GetObjectRange(client_, path_, position, nbytes, &result));
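
The added lines clamp the requested length to the bytes remaining in the object and return early for zero-length reads, so a read at or past EOF no longer asks S3 for a byte range beyond content_length_. A minimal Python-level sketch of the resulting behavior, reusing the open_output_stream/open_input_stream calls from the test below and assuming a hypothetical S3-backed filesystem `fs` and path `p`:

    # Hedged sketch, not part of the commit: `fs` and `p` stand in for an
    # S3-backed FileSystem and a writable path, as in test_fs.py below.
    data = b'0123456789'

    with fs.open_output_stream(p) as s:
        s.write(data)

    with fs.open_input_stream(p) as s:
        assert s.read() == data  # a size-less read() consumes up to EOF
        assert s.read() == b''   # at EOF, nbytes clamps to 0 and b'' comes back
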
python/pyarrow/tests/test_fs.py (3 additions, 4 deletions)
@@ -319,12 +319,12 @@ def identity(v):
 def test_open_input_stream(fs, pathfn, compression, buffer_size, compressor):
     p = pathfn('open-input-stream')
 
-    data = b'some data for reading' * 1024
+    data = b'some data for reading\n' * 512
     with fs.open_output_stream(p) as s:
         s.write(compressor(data))
 
     with fs.open_input_stream(p, compression, buffer_size) as s:
-        result = s.read(len(data))
+        result = s.read()
 
     assert result == data

@@ -386,9 +386,8 @@ def test_open_append_stream(fs, pathfn, compression, buffer_size, compressor,
     with fs.open_append_stream(p, compression, buffer_size) as f:
         f.write(b'\nnewly added')
 
-    appended = compressor(b'\nnewly added')
     with fs.open_input_stream(p) as f:
-        result = f.read(len(initial) + len(appended))
+        result = f.read()
 
     result = decompressor(result)
     assert result == b'already existing\nnewly added'
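
Both tests now call read() with no size argument, exercising the read-to-EOF path fixed in s3fs.cc above. A hedged illustration of why the size-less form is the interesting case: once a compressor is applied, the length of the stored object no longer matches len(data), so the reader cannot be handed an exact byte count and must instead stop cleanly at EOF (gzip here is only a stand-in; the diff does not show which compressors the fixtures use):

    import gzip

    data = b'some data for reading\n' * 512
    stored = gzip.compress(data)    # what actually lands in the object store
    assert len(stored) < len(data)  # repetitive data compresses well, so the
                                    # decompressed length is unknown up front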
