From 2148a24f9e47edfa385d5d14c19d38a902690f7a Mon Sep 17 00:00:00 2001 From: David Stansby Date: Tue, 28 Feb 2023 10:30:04 +0000 Subject: [PATCH] Use random arrays for memory testing --- benchmarks/detect_and_classify.py | 33 +++++++++++-------------------- 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/benchmarks/detect_and_classify.py b/benchmarks/detect_and_classify.py index a9df2ed8..191ae16f 100644 --- a/benchmarks/detect_and_classify.py +++ b/benchmarks/detect_and_classify.py @@ -1,31 +1,22 @@ -from pathlib import Path - import dask.array as da +import numpy as np from cellfinder_core.main import main -from cellfinder_core.tools.IO import read_with_dask - -data_dir = ( - Path(__file__).parent - / ".." - / "tests" - / "data" - / "integration" - / "detection" -).resolve() -signal_data_path = data_dir / "crop_planes" / "ch0" -background_data_path = data_dir / "crop_planes" / "ch1" voxel_sizes = [5, 2, 2] -# Read data -signal_array = read_with_dask(str(signal_data_path)) -background_array = read_with_dask(str(background_data_path)) +# Use random data for signal/background data +repeats = 2 +shape = (30 * repeats, 510, 667) + +signal_array = da.random.random(shape) +signal_array = (signal_array * 65535).astype(np.uint16) + +background_array = da.random.random(shape) +background_array = (background_array * 65535).astype(np.uint16) -# Artificially increase size of the test data -repeats = 5 -signal_array = da.repeat(signal_array, repeats=repeats, axis=0) -background_array = da.repeat(background_array, repeats=repeats, axis=0) +array_size_MB = signal_array.nbytes / 1024 / 1024 +print(f"Signal array size = {array_size_MB:.02f} MB") if __name__ == "__main__": # Run detection & classification