Parquet: Introducing more bad_data for testing
mapleFU committed Aug 15, 2024
1 parent f518d6b commit 060e0d5
Showing 3 changed files with 11 additions and 4 deletions.
1 change: 1 addition & 0 deletions cpp/build-support/fuzzing/generate_corpuses.sh
@@ -56,4 +56,5 @@ rm -rf ${CORPUS_DIR}
 ${OUT}/parquet-arrow-generate-fuzz-corpus ${CORPUS_DIR}
 # Add Parquet testing examples
 cp ${ARROW_CPP}/submodules/parquet-testing/data/*.parquet ${CORPUS_DIR}
+cp ${ARROW_CPP}/submodules/parquet-testing/bad_data/*.parquet ${CORPUS_DIR}
 ${ARROW_CPP}/build-support/fuzzing/pack_corpus.py ${CORPUS_DIR} ${OUT}/parquet-arrow-fuzz_seed_corpus.zip
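
For context, a minimal stand-alone sketch (not part of the commit) of what the added line does when run by hand; the CORPUS_DIR and ARROW_CPP values below are assumptions, not paths from the script:

# Hypothetical manual equivalent of the new corpus line; paths are assumptions.
CORPUS_DIR=/tmp/parquet-arrow-corpus
ARROW_CPP=$HOME/arrow/cpp
mkdir -p "${CORPUS_DIR}"
# Copy the known-bad Parquet files from the parquet-testing submodule into the fuzz seed corpus.
cp "${ARROW_CPP}"/submodules/parquet-testing/bad_data/*.parquet "${CORPUS_DIR}"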
12 changes: 9 additions & 3 deletions cpp/src/parquet/arrow/arrow_reader_writer_test.cc
@@ -5298,14 +5298,20 @@ TEST(TestArrowReadWrite, MultithreadedWrite) {
 
 TEST(TestArrowReadWrite, FuzzReader) {
   constexpr size_t kMaxFileSize = 1024 * 1024 * 1;
-  {
-    auto path = test::get_data_file("PARQUET-1481.parquet", /*is_good=*/false);
+  auto check_bad_file = [](const std::string& file_name) {
+    SCOPED_TRACE(file_name);
+    auto path = test::get_data_file(file_name, /*is_good=*/false);
     PARQUET_ASSIGN_OR_THROW(auto source, ::arrow::io::MemoryMappedFile::Open(
                                              path, ::arrow::io::FileMode::READ));
     PARQUET_ASSIGN_OR_THROW(auto buffer, source->Read(kMaxFileSize));
     auto s = internal::FuzzReader(buffer->data(), buffer->size());
     ASSERT_NOT_OK(s);
-  }
+  };
+  check_bad_file("PARQUET-1481.parquet");
+  check_bad_file("ARROW-GH-41317.parquet");
+  check_bad_file("ARROW-GH-41321.parquet");
+  check_bad_file("ARROW-RS-GH-6229-LEVELS.parquet");
+  check_bad_file("ARROW-RS-GH-6229-DICTHEADER.parquet");
   {
     auto path = test::get_data_file("alltypes_plain.parquet", /*is_good=*/true);
     PARQUET_ASSIGN_OR_THROW(auto source, ::arrow::io::MemoryMappedFile::Open(
