From d8b9cfb2773bbea4ba76f7b6f609fc1772f068d3 Mon Sep 17 00:00:00 2001
From: Marcel Martin
Date: Tue, 4 Jun 2024 12:50:57 +0200
Subject: [PATCH] Test reading from a large(r) compressed file

There was already a test for this, but only the uncompressed data was
"large". Issue #160 is only triggered if the *compressed* data is large
(in this case, larger than 128 kB), which apparently exceeds some input
buffer.

See #160
---
 tests/conftest.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/tests/conftest.py b/tests/conftest.py
index 33ad423..2a63905 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -10,10 +10,12 @@
 def create_large_file(tmp_path):
     def _create_large_file(extension):
         path = tmp_path / f"large{extension}"
-        random_text = "".join(random.choices(string.ascii_lowercase, k=1024))
-        # Make the text a lot bigger in order to ensure that it is larger than the
-        # pipe buffer size.
-        random_text *= 2048
+        random.seed(0)
+        chars = string.ascii_lowercase + "\n"
+        # Do not decrease this length. The generated file needs to have
+        # a certain length after compression to trigger some bugs
+        # (in particular, 512 kB is not sufficient).
+        random_text = "".join(random.choices(chars, k=1024 * 1024))
         with xopen(path, "w") as f:
             f.write(random_text)
         return path
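
Note (not part of the patch): a minimal sketch of how the create_large_file
fixture above might be exercised in a test that reads the large compressed
file back through xopen. The test function name and the assumption that the
fixture returns the inner _create_large_file factory are illustrative; only
the fixture name, the 1 MiB length, and the xopen calls come from the hunk
above.

    # Hypothetical test sketch; assumes create_large_file is a pytest factory
    # fixture (defined in tests/conftest.py) returning _create_large_file.
    from xopen import xopen

    def test_read_large_compressed_file(create_large_file):
        # Create ~1 MiB of random text, compressed based on the extension.
        path = create_large_file(".gz")
        # Reopen and decompress; this is the path that issue #160 exercises
        # once the compressed data grows past the input buffer size.
        with xopen(path) as f:
            content = f.read()
        # The fixture writes k=1024 * 1024 characters, so the round trip
        # should give the same amount back.
        assert len(content) == 1024 * 1024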