From 1057da065a2718dfd0aed63f0f79f4f628e5a3ad Mon Sep 17 00:00:00 2001 From: Noah Biederbeck Date: Mon, 31 May 2021 11:36:35 +0200 Subject: [PATCH 1/5] Rename function. Fix #1733. --- ctapipe/io/astropy_helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ctapipe/io/astropy_helpers.py b/ctapipe/io/astropy_helpers.py index cd4adfe4185..875a213c2eb 100644 --- a/ctapipe/io/astropy_helpers.py +++ b/ctapipe/io/astropy_helpers.py @@ -18,7 +18,7 @@ from contextlib import ExitStack -__all__ = ["h5_table_to_astropy"] +__all__ = ["read_table"] def read_table(h5file, path, start=None, stop=None, step=None) -> Table: From 9aaf83229719369c1295f9285fb66216414f181f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maximilian=20N=C3=B6the?= Date: Mon, 31 May 2021 11:50:33 +0200 Subject: [PATCH 2/5] Add pyflakes to the CI config --- .github/workflows/ci.yml | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 62091e68fca..fc24799bfdb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -32,10 +32,19 @@ jobs: run: | source .github/install.sh python --version - pip install codecov pytest-cov + pip install codecov pytest-cov pyflakes pip install -e .[all] pip freeze + - name: Static codechecks + env: + INSTALL_METHOD: ${{ matrix.install-method }} + run: | + if [[ "$INSTALL_METHOD" == "conda" ]]; then + source $CONDA/etc/profile.d/conda.sh + conda activate ci; + fi + pyflakes ctapipe - name: Tests env: INSTALL_METHOD: ${{ matrix.install-method }} From f906e01e77ef23e199b0548b6b73c556077910dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maximilian=20N=C3=B6the?= Date: Mon, 31 May 2021 12:07:18 +0200 Subject: [PATCH 3/5] Fix pyflakes errors --- ctapipe/core/tests/test_component.py | 2 +- ctapipe/core/tool.py | 4 ++-- ctapipe/image/tests/test_cleaning.py | 1 - ctapipe/instrument/camera/readout.py | 1 - ctapipe/instrument/camera/tests/test_readout.py | 2 +- ctapipe/instrument/subarray.py | 1 - ctapipe/io/astropy_helpers.py | 2 +- ctapipe/io/tests/test_simteleventsource.py | 2 +- ctapipe/plotting/bokeh_event_viewer.py | 3 +-- ctapipe/reco/hillas_intersection.py | 1 - ctapipe/reco/impact.py | 3 +-- ctapipe/reco/tests/test_HillasReconstructor.py | 2 +- ctapipe/reco/tests/test_shower_processor.py | 14 +++++--------- ctapipe/tests/setup_package.py | 2 -- ctapipe/tests/test_traitles_configurable.py | 1 - ctapipe/tools/bokeh/file_viewer.py | 2 +- ctapipe/tools/info.py | 1 - ctapipe/tools/stage1.py | 2 +- ctapipe/tools/tests/test_merge.py | 1 - ctapipe/tools/tests/test_tools.py | 4 +--- ctapipe/utils/unstructured_interpolator.py | 13 +++++-------- ctapipe/visualization/mpl_camera.py | 1 - 22 files changed, 22 insertions(+), 43 deletions(-) delete mode 100644 ctapipe/tests/setup_package.py diff --git a/ctapipe/core/tests/test_component.py b/ctapipe/core/tests/test_component.py index 371ed8b74b9..f871d8932e9 100644 --- a/ctapipe/core/tests/test_component.py +++ b/ctapipe/core/tests/test_component.py @@ -290,7 +290,7 @@ def test_extra_config_missing(): config["ExampleSubclass1"]["extra"] = 199.0 with pytest.raises(TraitError): - comp = ExampleSubclass1(config=config) + ExampleSubclass1(config=config) def test_default(): diff --git a/ctapipe/core/tool.py b/ctapipe/core/tool.py index f2b7c093a41..8110ed807c7 100644 --- a/ctapipe/core/tool.py +++ b/ctapipe/core/tool.py @@ -7,7 +7,7 @@ import os import re -from traitlets import default, TraitError +from traitlets import default from traitlets.config import 
Application, Configurable from .. import __version__ as version @@ -286,7 +286,7 @@ def run(self, argv=None): # check for any traitlets warnings using our custom handler if len(self.trait_warning_handler.errors) > 0: - raise ToolConfigurationError(f"Found config errors") + raise ToolConfigurationError("Found config errors") # remove handler to not impact performance with regex matching self.log.removeHandler(self.trait_warning_handler) diff --git a/ctapipe/image/tests/test_cleaning.py b/ctapipe/image/tests/test_cleaning.py index 07aeb329f6e..eb921f8b078 100644 --- a/ctapipe/image/tests/test_cleaning.py +++ b/ctapipe/image/tests/test_cleaning.py @@ -1,4 +1,3 @@ -import os import numpy as np from numpy.testing import assert_allclose from ctapipe.image import cleaning diff --git a/ctapipe/instrument/camera/readout.py b/ctapipe/instrument/camera/readout.py index 39923bc8147..98e44b52bcd 100644 --- a/ctapipe/instrument/camera/readout.py +++ b/ctapipe/instrument/camera/readout.py @@ -7,7 +7,6 @@ import numpy as np from astropy import units as u from astropy.table import Table -from scipy.stats import norm from ctapipe.utils import get_table_dataset diff --git a/ctapipe/instrument/camera/tests/test_readout.py b/ctapipe/instrument/camera/tests/test_readout.py index 25974905a3b..5d67af394ab 100644 --- a/ctapipe/instrument/camera/tests/test_readout.py +++ b/ctapipe/instrument/camera/tests/test_readout.py @@ -151,5 +151,5 @@ def test_camera_from_name(camera_name): assert str(camera) == camera_name except FileNotFoundError: # these two don't have readout definitions on the dataserver - if camera_name not in ["MAGICCam", "Whipple109"]: + if camera_name not in ["MAGICCam", "Whipple109", "FACT"]: raise diff --git a/ctapipe/instrument/subarray.py b/ctapipe/instrument/subarray.py index 42bd8013c0d..47a993225da 100644 --- a/ctapipe/instrument/subarray.py +++ b/ctapipe/instrument/subarray.py @@ -1,7 +1,6 @@ """ Description of Arrays or Subarrays of telescopes """ -from collections import defaultdict from pathlib import Path import numpy as np diff --git a/ctapipe/io/astropy_helpers.py b/ctapipe/io/astropy_helpers.py index cd4adfe4185..875a213c2eb 100644 --- a/ctapipe/io/astropy_helpers.py +++ b/ctapipe/io/astropy_helpers.py @@ -18,7 +18,7 @@ from contextlib import ExitStack -__all__ = ["h5_table_to_astropy"] +__all__ = ["read_table"] def read_table(h5file, path, start=None, stop=None, step=None) -> Table: diff --git a/ctapipe/io/tests/test_simteleventsource.py b/ctapipe/io/tests/test_simteleventsource.py index f059b39698a..373024338b0 100644 --- a/ctapipe/io/tests/test_simteleventsource.py +++ b/ctapipe/io/tests/test_simteleventsource.py @@ -79,7 +79,7 @@ def test_that_event_is_not_modified_after_loop(): def test_additional_meta_data_from_simulation_config(): with SimTelEventSource(input_url=gamma_test_large_path) as reader: - data = next(iter(reader)) + next(iter(reader)) # for expectation values from astropy import units as u diff --git a/ctapipe/plotting/bokeh_event_viewer.py b/ctapipe/plotting/bokeh_event_viewer.py index dc1000613d9..c17e30f1512 100644 --- a/ctapipe/plotting/bokeh_event_viewer.py +++ b/ctapipe/plotting/bokeh_event_viewer.py @@ -1,6 +1,5 @@ -import numpy as np from bokeh.layouts import layout, column -from bokeh.models import Select, Span +from bokeh.models import Select from ctapipe.core import Component from ctapipe.visualization.bokeh import CameraDisplay, WaveformDisplay diff --git a/ctapipe/reco/hillas_intersection.py b/ctapipe/reco/hillas_intersection.py index 
8235c160f85..1222001d707 100644 --- a/ctapipe/reco/hillas_intersection.py +++ b/ctapipe/reco/hillas_intersection.py @@ -25,7 +25,6 @@ CameraFrame, TiltedGroundFrame, project_to_ground, - GroundFrame, MissingFrameAttributeWarning, ) import copy diff --git a/ctapipe/reco/impact.py b/ctapipe/reco/impact.py index d4a0724265d..39a67c25576 100644 --- a/ctapipe/reco/impact.py +++ b/ctapipe/reco/impact.py @@ -404,7 +404,6 @@ def get_likelihood( # and ignore them from then on zenith = (np.pi / 2) - self.array_direction.alt.to(u.rad).value - azimuth = self.array_direction.az # Geometrically calculate the depth of maximum given this test position x_max = self.get_shower_max(source_x, source_y, core_x, core_y, zenith) @@ -834,7 +833,7 @@ def minimise(self, params, step, limits, minimiser_name="minuit", max_calls=0): self.min.tol *= 1000 self.min.set_strategy(1) - migrad = self.min.migrad() + self.min.migrad() fit_params = self.min.values errors = self.min.errors diff --git a/ctapipe/reco/tests/test_HillasReconstructor.py b/ctapipe/reco/tests/test_HillasReconstructor.py index 6dec71ef041..4c5a6aa2d40 100644 --- a/ctapipe/reco/tests/test_HillasReconstructor.py +++ b/ctapipe/reco/tests/test_HillasReconstructor.py @@ -200,7 +200,7 @@ def test_invalid_events(): try: moments = hillas_parameters(geom[mask], dl1.image[mask]) hillas_dict[tel_id] = moments - except HillasParameterizationError as e: + except HillasParameterizationError: continue # construct a dict only containing the last telescope events diff --git a/ctapipe/reco/tests/test_shower_processor.py b/ctapipe/reco/tests/test_shower_processor.py index bb7666ce4ae..3c6c5e6530a 100644 --- a/ctapipe/reco/tests/test_shower_processor.py +++ b/ctapipe/reco/tests/test_shower_processor.py @@ -1,7 +1,6 @@ """ Tests for ShowerProcessor functionalities. 
""" -import pytest from numpy import isfinite from traitlets.config.loader import Config @@ -24,9 +23,7 @@ def test_shower_processor_geometry(example_event, example_subarray): image_cleaner_type="MARSImageCleaner", ) - process_shower = ShowerProcessor( - subarray=example_subarray - ) + process_shower = ShowerProcessor(subarray=example_subarray) calibrate(example_event) process_images(example_event) @@ -45,12 +42,11 @@ def test_shower_processor_geometry(example_event, example_subarray): assert isfinite(DL2a.average_intensity) # Increase some quality cuts and check that we get defaults - config.ShowerQualityQuery.quality_criteria = [("> 500 phes", "lambda p: p.hillas.intensity > 500")] + config.ShowerQualityQuery.quality_criteria = [ + ("> 500 phes", "lambda p: p.hillas.intensity > 500") + ] - process_shower = ShowerProcessor( - config=config, - subarray=example_subarray - ) + process_shower = ShowerProcessor(config=config, subarray=example_subarray) process_shower(example_event) print(process_shower.check_shower.to_table()) diff --git a/ctapipe/tests/setup_package.py b/ctapipe/tests/setup_package.py deleted file mode 100644 index 17b092d110f..00000000000 --- a/ctapipe/tests/setup_package.py +++ /dev/null @@ -1,2 +0,0 @@ -def get_package_data(): - return {_ASTROPY_PACKAGE_NAME_ + ".tests": ["coveragerc"]} diff --git a/ctapipe/tests/test_traitles_configurable.py b/ctapipe/tests/test_traitles_configurable.py index 05987a981e7..eb891aa75a8 100644 --- a/ctapipe/tests/test_traitles_configurable.py +++ b/ctapipe/tests/test_traitles_configurable.py @@ -2,7 +2,6 @@ import pkgutil import importlib from collections import defaultdict -import sys from ctapipe.core import Component, Tool diff --git a/ctapipe/tools/bokeh/file_viewer.py b/ctapipe/tools/bokeh/file_viewer.py index ece4991cb16..f0c4d411e7c 100644 --- a/ctapipe/tools/bokeh/file_viewer.py +++ b/ctapipe/tools/bokeh/file_viewer.py @@ -5,7 +5,7 @@ from bokeh.models import Button, PreText, Select, TextInput from bokeh.server.server import Server from bokeh.themes import Theme -from traitlets import Bool, Dict, Int, List +from traitlets import Bool, Dict, Int from ctapipe.calib import CameraCalibrator from ctapipe.core import Tool, traits diff --git a/ctapipe/tools/info.py b/ctapipe/tools/info.py index 2bcbd8e482b..0c4ab83cc54 100644 --- a/ctapipe/tools/info.py +++ b/ctapipe/tools/info.py @@ -1,6 +1,5 @@ # Licensed under a 3-clause BSD style license - see LICENSE.rst """ print information about ctapipe and its command-line tools. 
""" -import importlib import logging import os import sys diff --git a/ctapipe/tools/stage1.py b/ctapipe/tools/stage1.py index 5ed4ee13dd4..8dbfc2733f6 100644 --- a/ctapipe/tools/stage1.py +++ b/ctapipe/tools/stage1.py @@ -7,7 +7,7 @@ from ..calib.camera import CameraCalibrator, GainSelector from ..core import Tool -from ..core.traits import Bool, List, classes_with_traits +from ..core.traits import Bool, classes_with_traits from ..image import ImageCleaner, ImageProcessor from ..image.extractor import ImageExtractor from ..io import DataLevel, DataWriter, EventSource, SimTelEventSource diff --git a/ctapipe/tools/tests/test_merge.py b/ctapipe/tools/tests/test_merge.py index 8af8deb324c..6462d9c98e8 100644 --- a/ctapipe/tools/tests/test_merge.py +++ b/ctapipe/tools/tests/test_merge.py @@ -1,4 +1,3 @@ -import shutil import pytest import tempfile diff --git a/ctapipe/tools/tests/test_tools.py b/ctapipe/tools/tests/test_tools.py index 748408cf55f..4b73cd02e68 100644 --- a/ctapipe/tools/tests/test_tools.py +++ b/ctapipe/tools/tests/test_tools.py @@ -1,8 +1,6 @@ """ Test individual tool functionality """ - -import os import shlex import sys import subprocess @@ -16,7 +14,7 @@ from ctapipe.utils import get_dataset_path from ctapipe.core import run_tool -from ctapipe.io import DataLevel, EventSource +from ctapipe.io import DataLevel import numpy as np from pathlib import Path diff --git a/ctapipe/utils/unstructured_interpolator.py b/ctapipe/utils/unstructured_interpolator.py index d50acdaf9e9..7580b2c9994 100644 --- a/ctapipe/utils/unstructured_interpolator.py +++ b/ctapipe/utils/unstructured_interpolator.py @@ -12,7 +12,6 @@ import numpy as np from scipy.spatial import Delaunay -import time from scipy.ndimage import map_coordinates import numpy.ma as ma @@ -70,18 +69,16 @@ def __init__( self._function_name = "__call__" self._remember = remember_last - self._previous_v = 0 - self._previous_m = 0 - self._previous_shape = 0 + self.reset() self._bounds = bounds def reset(self): """ Function used to reset some class values stored after previous event """ - self._previous_v = 0 - self._previous_m = 0 - self._previous_shape = 0 + self._previous_v = None + self._previous_m = None + self._previous_shape = None def __call__(self, points, eval_points=None): @@ -93,7 +90,7 @@ def __call__(self, points, eval_points=None): # First find simplexes that contain interpolated points # In - if self._remember and self._previous_v is not 0: + if self._remember and self._previous_v is not None: previous_keys = self.keys[self._previous_v.ravel()] hull = Delaunay(previous_keys) diff --git a/ctapipe/visualization/mpl_camera.py b/ctapipe/visualization/mpl_camera.py index a8167e810c8..2b28e9fb20d 100644 --- a/ctapipe/visualization/mpl_camera.py +++ b/ctapipe/visualization/mpl_camera.py @@ -11,7 +11,6 @@ from matplotlib.collections import PatchCollection from matplotlib.colors import Normalize, LogNorm, SymLogNorm from matplotlib.patches import Ellipse, RegularPolygon, Rectangle, Circle -from numpy import sqrt from ctapipe.instrument import PixelShape From 8588f429ac6130a3ff8e7a59ef6db5b4df2fcd3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maximilian=20N=C3=B6the?= Date: Mon, 31 May 2021 12:38:49 +0200 Subject: [PATCH 4/5] Fix wrong comment --- ctapipe/instrument/camera/tests/test_readout.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ctapipe/instrument/camera/tests/test_readout.py b/ctapipe/instrument/camera/tests/test_readout.py index 5d67af394ab..1982d8af104 100644 --- 
a/ctapipe/instrument/camera/tests/test_readout.py +++ b/ctapipe/instrument/camera/tests/test_readout.py @@ -150,6 +150,6 @@ def test_camera_from_name(camera_name): camera = CameraReadout.from_name(camera_name) assert str(camera) == camera_name except FileNotFoundError: - # these two don't have readout definitions on the dataserver + # these don't have readout definitions on the dataserver if camera_name not in ["MAGICCam", "Whipple109", "FACT"]: raise From 6d0051e1320e9bb2e30ea09ae6bd67441afc7bc3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maximilian=20N=C3=B6the?= Date: Wed, 2 Jun 2021 13:56:31 +0200 Subject: [PATCH 5/5] Parallel tests (#1740) * Add and use parametrized fixture for camera geometries, fixes #1641 * Fix tmp_path and fixture usage in tool and hdf5 writer tests * Run tests in parallel in CI * Fix unused imports * Fix a race condition in download_file_cached * Use n=auto for parallel tests in CI * Use mamba in CI --- .github/install.sh | 5 +- .github/workflows/ci.yml | 21 +- ctapipe/conftest.py | 27 +- .../image/tests/test_geometry_converter.py | 41 +- .../camera/tests/test_description.py | 12 +- .../instrument/camera/tests/test_geometry.py | 34 +- .../instrument/camera/tests/test_readout.py | 15 +- ctapipe/instrument/tests/test_telescope.py | 12 +- ctapipe/io/tests/test_hdf5.py | 644 +++++++++--------- ctapipe/tools/camdemo.py | 12 +- ctapipe/tools/tests/test_tools.py | 152 +++-- ctapipe/utils/download.py | 63 +- 12 files changed, 491 insertions(+), 547 deletions(-) diff --git a/.github/install.sh b/.github/install.sh index 47a20805bbd..23aadaff19b 100644 --- a/.github/install.sh +++ b/.github/install.sh @@ -8,10 +8,13 @@ if [[ "$INSTALL_METHOD" == "conda" ]]; then conda update -q conda # get latest conda version # Useful for debugging any issues with conda conda info -a + conda install -c conda-forge mamba sed -i -e "s/- python=.*/- python=$PYTHON_VERSION/g" environment.yml - conda env create -n ci --file environment.yml + mamba env create -n ci --file environment.yml conda activate ci + echo 'source $CONDA/etc/profile.d/conda.sh' >> ~/.bash_profile + echo 'conda activate ci' >> ~/.bash_profile else echo "Using pip" pip install -U pip setuptools wheel diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fc24799bfdb..a52fa7fe0b7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -32,28 +32,21 @@ jobs: run: | source .github/install.sh python --version - pip install codecov pytest-cov pyflakes + pip install codecov pytest-cov pyflakes pytest-xdist pip install -e .[all] pip freeze - name: Static codechecks - env: - INSTALL_METHOD: ${{ matrix.install-method }} + # need to use a login shell for the conda setup to work + shell: bash -leo pipefail {0} run: | - if [[ "$INSTALL_METHOD" == "conda" ]]; then - source $CONDA/etc/profile.d/conda.sh - conda activate ci; - fi pyflakes ctapipe + - name: Tests - env: - INSTALL_METHOD: ${{ matrix.install-method }} + # need to use a login shell for the conda setup to work + shell: bash -leo pipefail {0} run: | - if [[ "$INSTALL_METHOD" == "conda" ]]; then - source $CONDA/etc/profile.d/conda.sh - conda activate ci; - fi - pytest --cov --cov-report=xml + pytest --cov --cov-report=xml -n auto --dist loadscope ctapipe-info --version - uses: codecov/codecov-action@v1 diff --git a/ctapipe/conftest.py b/ctapipe/conftest.py index aaa7f89d0a9..65cfeb747db 100644 --- a/ctapipe/conftest.py +++ b/ctapipe/conftest.py @@ -11,12 +11,27 @@ from ctapipe.instrument import CameraGeometry -@pytest.fixture(scope="session") -def 
camera_geometries(): - return [ - CameraGeometry.from_name(name) - for name in ["LSTCam", "NectarCam", "CHEC", "FlashCam", "MAGICCam"] - ] +# names of camera geometries available on the data server +camera_names = [ + "ASTRICam", + "CHEC", + "DigiCam", + "FACT", + "FlashCam", + "HESS-I", + "HESS-II", + "LSTCam", + "MAGICCam", + "NectarCam", + "SCTCam", + "VERITAS", + "Whipple490", +] + + +@pytest.fixture(scope="session", params=camera_names) +def camera_geometry(request): + return CameraGeometry.from_name(request.param) @pytest.fixture(scope="session") diff --git a/ctapipe/image/tests/test_geometry_converter.py b/ctapipe/image/tests/test_geometry_converter.py index be1bc8bcef2..31b3a2b976d 100644 --- a/ctapipe/image/tests/test_geometry_converter.py +++ b/ctapipe/image/tests/test_geometry_converter.py @@ -9,14 +9,10 @@ array_2d_to_chec, ) from ctapipe.image.hillas import hillas_parameters -from ctapipe.instrument import CameraDescription, CameraGeometry from ctapipe.image.toymodel import Gaussian import astropy.units as u -camera_names = CameraDescription.get_known_camera_names() - - def create_mock_image(geom): """ creates a mock image, which parameters are adapted to the camera size @@ -32,16 +28,14 @@ def create_mock_image(geom): ) _, image, _ = model.generate_image( - geom, intensity=0.5 * geom.n_pixels, nsb_level_pe=3, + geom, intensity=0.5 * geom.n_pixels, nsb_level_pe=3 ) return image -@pytest.mark.parametrize("rot", [3,]) -@pytest.mark.parametrize("camera_name", camera_names) -def test_convert_geometry(camera_name, rot): - - geom = CameraGeometry.from_name(camera_name) +@pytest.mark.parametrize("rot", [3]) +def test_convert_geometry(camera_geometry, rot): + geom = camera_geometry image = create_mock_image(geom) hillas_0 = hillas_parameters(geom, image) @@ -81,13 +75,11 @@ def test_convert_geometry(camera_name, rot): # TODO: test other parameters -@pytest.mark.parametrize("rot", [3,]) -@pytest.mark.parametrize("camera_name", camera_names) -def test_convert_geometry_mock(camera_name, rot): +@pytest.mark.parametrize("rot", [3]) +def test_convert_geometry_mock(camera_geometry, rot): """here we use a different key for the back conversion to trigger the mock conversion """ - - geom = CameraGeometry.from_name(camera_name) + geom = camera_geometry image = create_mock_image(geom) hillas_0 = hillas_parameters(geom, image) @@ -106,22 +98,3 @@ def test_convert_geometry_mock(camera_name, rot): hillas_1 = hillas_parameters(geom, image1d) assert np.abs(hillas_1.phi - hillas_0.phi).deg < 1.0 - - -# def plot_cam(geom, geom2d, geom1d, image, image2d, image1d): -# # plt.viridis() -# plt.figure(figsize=(12, 4)) -# ax = plt.subplot(1, 3, 1) -# CameraDisplay(geom, image=image).add_colorbar() -# plt.subplot(1, 3, 2, sharex=ax, sharey=ax) -# CameraDisplay(geom2d, image=image2d).add_colorbar() -# plt.subplot(1, 3, 3, sharex=ax, sharey=ax) -# CameraDisplay(geom1d, image=image1d).add_colorbar() -# -# -# if __name__ == "__main__": -# import logging -# logging.basicConfig(level=logging.DEBUG) -# for camera_name in CameraGeometry.get_known_camera_names(): -# test_convert_geometry(camera_name, 3) -# plt.show() diff --git a/ctapipe/instrument/camera/tests/test_description.py b/ctapipe/instrument/camera/tests/test_description.py index 7cfcf578f9b..0761b03790f 100644 --- a/ctapipe/instrument/camera/tests/test_description.py +++ b/ctapipe/instrument/camera/tests/test_description.py @@ -1,14 +1,6 @@ from ctapipe.instrument import CameraDescription -def test_known_camera_names(camera_geometries): +def 
test_known_camera_names(camera_geometry): """ Check that we can get a list of known camera names """ - cams = CameraDescription.get_known_camera_names() - assert len(cams) > 4 - assert "FlashCam" in cams - assert "NectarCam" in cams - - # TODO: Requires camreadout files to be generated - # for cam in cams: - # camera = CameraDescription.from_name(cam) - # camera.info() + assert camera_geometry.camera_name in CameraDescription.get_known_camera_names() diff --git a/ctapipe/instrument/camera/tests/test_geometry.py b/ctapipe/instrument/camera/tests/test_geometry.py index c54ba7747d8..eb8e3adb688 100644 --- a/ctapipe/instrument/camera/tests/test_geometry.py +++ b/ctapipe/instrument/camera/tests/test_geometry.py @@ -1,11 +1,9 @@ """ Tests for CameraGeometry """ import numpy as np from astropy import units as u -from ctapipe.instrument import CameraDescription, CameraGeometry, PixelShape +from ctapipe.instrument import CameraGeometry, PixelShape import pytest -camera_names = CameraDescription.get_known_camera_names() - def test_construct(): """ Check we can make a CameraGeometry from scratch """ @@ -93,15 +91,13 @@ def test_find_neighbor_pixels(): assert set(neigh[11]) == {16, 6, 10, 12} -@pytest.mark.parametrize("camera_name", camera_names) -def test_neighbor_pixels(camera_name): +def test_neighbor_pixels(camera_geometry): """ test if each camera has a reasonable number of neighbor pixels (4 for rectangular, and 6 for hexagonal. Other than edge pixels, the majority should have the same value """ - - geom = CameraGeometry.from_name(camera_name) + geom = camera_geometry n_pix = len(geom.pix_id) n_neighbors = [len(x) for x in geom.neighbors] @@ -116,7 +112,7 @@ def test_neighbor_pixels(camera_name): # whipple has inhomogenious pixels that mess with pixel neighborhood # calculation - if camera_name != "Whipple490": + if not geom.camera_name.startswith("Whipple"): assert np.all(geom.neighbor_matrix == geom.neighbor_matrix.T) assert n_neighbors.count(1) == 0 # no pixel should have a single neighbor @@ -229,15 +225,13 @@ def test_slicing(): assert len(sliced2.pix_x) == 5 -@pytest.mark.parametrize("camera_name", camera_names) -def test_slicing_rotation(camera_name): +def test_slicing_rotation(camera_geometry): """ Check that we can rotate and slice """ - cam = CameraGeometry.from_name(camera_name) - cam.rotate("25d") + camera_geometry.rotate("25d") - sliced1 = cam[5:10] + sliced1 = camera_geometry[5:10] - assert sliced1.pix_x[0] == cam.pix_x[5] + assert sliced1.pix_x[0] == camera_geometry.pix_x[5] def test_rectangle_patch_neighbors(): @@ -296,19 +290,17 @@ def test_hashing(): assert len(set([cam1, cam2, cam3])) == 2 -@pytest.mark.parametrize("camera_name", camera_names) -def test_camera_from_name(camera_name): +def test_camera_from_name(camera_geometry): """ check we can construct all cameras from name""" - camera = CameraGeometry.from_name(camera_name) - assert str(camera) == camera_name + camera = CameraGeometry.from_name(camera_geometry.camera_name) + assert str(camera) == camera_geometry.camera_name -@pytest.mark.parametrize("camera_name", camera_names) -def test_camera_coordinate_transform(camera_name): +def test_camera_coordinate_transform(camera_geometry): """test conversion of the coordinates stored in a camera frame""" from ctapipe.coordinates import EngineeringCameraFrame, CameraFrame, TelescopeFrame - geom = CameraGeometry.from_name(camera_name) + geom = camera_geometry trans_geom = geom.transform_to(EngineeringCameraFrame()) unit = geom.pix_x.unit diff --git 
a/ctapipe/instrument/camera/tests/test_readout.py b/ctapipe/instrument/camera/tests/test_readout.py index 1982d8af104..3445e49fae4 100644 --- a/ctapipe/instrument/camera/tests/test_readout.py +++ b/ctapipe/instrument/camera/tests/test_readout.py @@ -1,11 +1,9 @@ """ Tests for CameraGeometry """ import numpy as np from astropy import units as u -from ctapipe.instrument import CameraDescription, CameraReadout +from ctapipe.instrument import CameraReadout import pytest -camera_names = CameraDescription.get_known_camera_names() - def test_construct(): """ Check we can make a CameraReadout from scratch """ @@ -142,14 +140,13 @@ def test_hashing(): assert len({readout1, readout2, readout3}) == 2 -@pytest.mark.parametrize("camera_name", camera_names) -def test_camera_from_name(camera_name): +def test_camera_from_name(camera_geometry): """ check we can construct all cameras from name""" try: - camera = CameraReadout.from_name(camera_name) - assert str(camera) == camera_name + camera = CameraReadout.from_name(camera_geometry.camera_name) + assert str(camera) == camera_geometry.camera_name except FileNotFoundError: - # these don't have readout definitions on the dataserver - if camera_name not in ["MAGICCam", "Whipple109", "FACT"]: + # Most non-cta cameras don't have readout provided on the data server + if camera_geometry.camera_name in ["LSTCam", "NectarCam", "FlashCam", "CHEC"]: raise diff --git a/ctapipe/instrument/tests/test_telescope.py b/ctapipe/instrument/tests/test_telescope.py index 7faa74755ee..4be192bd18f 100644 --- a/ctapipe/instrument/tests/test_telescope.py +++ b/ctapipe/instrument/tests/test_telescope.py @@ -29,17 +29,13 @@ def test_hash(): assert len(set(telescopes)) == 3 -OPTICS_NAMES = OpticsDescription.get_known_optics_names() -CAMERA_NAMES = CameraDescription.get_known_camera_names() - - -@pytest.mark.parametrize("camera_name", CAMERA_NAMES) -@pytest.mark.parametrize("optics_name", OPTICS_NAMES) -def test_telescope_from_name(optics_name, camera_name): +@pytest.mark.parametrize("optics_name", ["LST", "MST"]) +def test_telescope_from_name(optics_name, camera_geometry): """ Check we can construct all telescopes from their names """ + camera_name = camera_geometry.camera_name tel = TelescopeDescription.from_name(optics_name, camera_name) assert optics_name in str(tel) assert camera_name in str(tel) assert tel.camera.geometry.pix_x.shape[0] > 0 assert tel.optics.equivalent_focal_length.to("m") > 0 - assert tel.type in ["MST", "SST", "LST", "UNKNOWN"] + assert tel.type in {"MST", "SST", "LST", "UNKNOWN"} diff --git a/ctapipe/io/tests/test_hdf5.py b/ctapipe/io/tests/test_hdf5.py index 48f2c1f234a..bdf69d3ba2e 100644 --- a/ctapipe/io/tests/test_hdf5.py +++ b/ctapipe/io/tests/test_hdf5.py @@ -1,5 +1,3 @@ -import tempfile - import enum import numpy as np import pytest @@ -22,34 +20,31 @@ @pytest.fixture(scope="session") -def temp_h5_file(tmpdir_factory): - """a fixture that fetches a temporary output dir/file for a test - file that we want to read or write (so it doesn't clutter up the test - directory when the automated tests are run)""" - return str(tmpdir_factory.mktemp("data").join("test.h5")) - +def test_h5_file(tmp_path_factory): + """Test hdf5 file with some tables for the reader tests""" + path = tmp_path_factory.mktemp("hdf5") / "test.h5" -def test_write_container(temp_h5_file): - r0tel = R0CameraContainer() - simshower = SimulatedShowerContainer() - simshower.reset() - r0tel.waveform = np.random.uniform(size=(50, 10)) - r0tel.meta["test_attribute"] = 3.14159 - r0tel.meta["date"] 
= "2020-10-10" + r0 = R0CameraContainer() + shower = SimulatedShowerContainer() + r0.waveform = np.random.uniform(size=(50, 10)) + r0.meta["test_attribute"] = 3.14159 + r0.meta["date"] = "2020-10-10" with HDF5TableWriter( - temp_h5_file, group_name="R0", filters=tables.Filters(complevel=7) + path, group_name="R0", filters=tables.Filters(complevel=7) ) as writer: - for ii in range(100): - r0tel.waveform[:] = np.random.uniform(size=(50, 10)) - simshower.energy = 10 ** np.random.uniform(1, 2) * u.TeV - simshower.core_x = np.random.uniform(-1, 1) * u.m - simshower.core_y = np.random.uniform(-1, 1) * u.m + for _ in range(100): + r0.waveform[:] = np.random.uniform(size=(50, 10)) + shower.energy = 10 ** np.random.uniform(1, 2) * u.TeV + shower.core_x = np.random.uniform(-1, 1) * u.m + shower.core_y = np.random.uniform(-1, 1) * u.m + + writer.write("tel_001", r0) + writer.write("tel_002", r0) # write a second table too + writer.write("sim_shower", shower) - writer.write("tel_001", r0tel) - writer.write("tel_002", r0tel) # write a second table too - writer.write("sim_shower", simshower) + return path def test_append_container(tmp_path): @@ -72,7 +67,8 @@ def test_append_container(tmp_path): assert np.all(table["event_id"] == np.tile(np.arange(10), 2)) -def test_read_multiple_containers(): +def test_read_multiple_containers(tmp_path): + path = tmp_path / "test_append.h5" hillas_parameter_container = HillasParametersContainer( x=1 * u.m, y=1 * u.m, length=1 * u.m, width=1 * u.m ) @@ -83,54 +79,55 @@ def test_read_multiple_containers(): intensity_width_1=0.1, intensity_width_2=0.1, ) - with tempfile.NamedTemporaryFile() as f: - with HDF5TableWriter(f.name, group_name="dl1", add_prefix=True) as writer: - writer.write("params", [hillas_parameter_container, leakage_container]) - - df = pd.read_hdf(f.name, key="/dl1/params") - assert "hillas_x" in df.columns - assert "leakage_pixels_width_1" in df.columns - - # test reading both containers separately - with HDF5TableReader(f.name) as reader: - generator = reader.read( - "/dl1/params", HillasParametersContainer(), prefixes=True - ) - hillas = next(generator) - for value, read_value in zip( - hillas_parameter_container.as_dict().values(), hillas.as_dict().values() - ): - np.testing.assert_equal(value, read_value) - - with HDF5TableReader(f.name) as reader: - generator = reader.read("/dl1/params", LeakageContainer(), prefixes=True) - leakage = next(generator) - for value, read_value in zip( - leakage_container.as_dict().values(), leakage.as_dict().values() - ): - np.testing.assert_equal(value, read_value) - - # test reading both containers simultaneously - with HDF5TableReader(f.name) as reader: - generator = reader.read( - "/dl1/params", - [HillasParametersContainer(), LeakageContainer()], - prefixes=True, - ) - hillas_, leakage_ = next(generator) - - for value, read_value in zip( - leakage_container.as_dict().values(), leakage_.as_dict().values() - ): - np.testing.assert_equal(value, read_value) - - for value, read_value in zip( - hillas_parameter_container.as_dict().values(), hillas_.as_dict().values() - ): - np.testing.assert_equal(value, read_value) - - -def test_read_without_prefixes(): + with HDF5TableWriter(path, group_name="dl1", add_prefix=True) as writer: + writer.write("params", [hillas_parameter_container, leakage_container]) + + df = pd.read_hdf(path, key="/dl1/params") + assert "hillas_x" in df.columns + assert "leakage_pixels_width_1" in df.columns + + # test reading both containers separately + with HDF5TableReader(path) as reader: + 
generator = reader.read( + "/dl1/params", HillasParametersContainer(), prefixes=True + ) + hillas = next(generator) + for value, read_value in zip( + hillas_parameter_container.as_dict().values(), hillas.as_dict().values() + ): + np.testing.assert_equal(value, read_value) + + with HDF5TableReader(path) as reader: + generator = reader.read("/dl1/params", LeakageContainer(), prefixes=True) + leakage = next(generator) + for value, read_value in zip( + leakage_container.as_dict().values(), leakage.as_dict().values() + ): + np.testing.assert_equal(value, read_value) + + # test reading both containers simultaneously + with HDF5TableReader(path) as reader: + generator = reader.read( + "/dl1/params", + [HillasParametersContainer(), LeakageContainer()], + prefixes=True, + ) + hillas_, leakage_ = next(generator) + + for value, read_value in zip( + leakage_container.as_dict().values(), leakage_.as_dict().values() + ): + np.testing.assert_equal(value, read_value) + + for value, read_value in zip( + hillas_parameter_container.as_dict().values(), hillas_.as_dict().values() + ): + np.testing.assert_equal(value, read_value) + + +def test_read_without_prefixes(tmp_path): + path = tmp_path / "test.h5" + hillas_parameter_container = HillasParametersContainer( x=1 * u.m, y=1 * u.m, length=1 * u.m, width=1 * u.m ) @@ -141,54 +138,56 @@ def test_read_without_prefixes(): intensity_width_1=0.1, intensity_width_2=0.1, ) - with tempfile.NamedTemporaryFile() as f: - with HDF5TableWriter(f.name, group_name="dl1", add_prefix=False) as writer: - writer.write("params", [hillas_parameter_container, leakage_container]) - - df = pd.read_hdf(f.name, key="/dl1/params") - assert "x" in df.columns - assert "pixels_width_1" in df.columns - - # call with prefixes=False - with HDF5TableReader(f.name) as reader: - generator = reader.read( - "/dl1/params", - [HillasParametersContainer(), LeakageContainer()], - prefixes=False, - ) - hillas_, leakage_ = next(generator) - - for value, read_value in zip( - leakage_container.as_dict().values(), leakage_.as_dict().values() - ): - np.testing.assert_equal(value, read_value) - - for value, read_value in zip( - hillas_parameter_container.as_dict().values(), hillas_.as_dict().values() - ): - np.testing.assert_equal(value, read_value) - - # call with manually removed prefixes - with HDF5TableReader(f.name) as reader: - generator = reader.read( - "/dl1/params", - [HillasParametersContainer(prefix=""), LeakageContainer(prefix="")], - prefixes=True, - ) - hillas_, leakage_ = next(generator) - - for value, read_value in zip( - leakage_container.as_dict().values(), leakage_.as_dict().values() - ): - np.testing.assert_equal(value, read_value) - - for value, read_value in zip( - hillas_parameter_container.as_dict().values(), hillas_.as_dict().values() - ): - np.testing.assert_equal(value, read_value) - - -def test_read_duplicated_container_types(): + + with HDF5TableWriter(path, group_name="dl1", add_prefix=False) as writer: + writer.write("params", [hillas_parameter_container, leakage_container]) + + df = pd.read_hdf(path, key="/dl1/params") + assert "x" in df.columns + assert "pixels_width_1" in df.columns + + # call with prefixes=False + with HDF5TableReader(path) as reader: + generator = reader.read( + "/dl1/params", + [HillasParametersContainer(), LeakageContainer()], + prefixes=False, + ) + hillas_, leakage_ = next(generator) + + for value, read_value in zip( + leakage_container.as_dict().values(), leakage_.as_dict().values() + ): + np.testing.assert_equal(value, read_value) + + for value, 
read_value in zip( + hillas_parameter_container.as_dict().values(), hillas_.as_dict().values() + ): + np.testing.assert_equal(value, read_value) + + # call with manually removed prefixes + with HDF5TableReader(path) as reader: + generator = reader.read( + "/dl1/params", + [HillasParametersContainer(prefix=""), LeakageContainer(prefix="")], + prefixes=True, + ) + hillas_, leakage_ = next(generator) + + for value, read_value in zip( + leakage_container.as_dict().values(), leakage_.as_dict().values() + ): + np.testing.assert_equal(value, read_value) + + for value, read_value in zip( + hillas_parameter_container.as_dict().values(), hillas_.as_dict().values() + ): + np.testing.assert_equal(value, read_value) + + +def test_read_duplicated_container_types(tmp_path): + path = tmp_path / "test.h5" + hillas_config_1 = HillasParametersContainer( x=1 * u.m, y=2 * u.m, length=3 * u.m, width=4 * u.m, prefix="hillas_1" ) @@ -196,55 +195,57 @@ def test_read_duplicated_container_types(): x=2 * u.m, y=3 * u.m, length=4 * u.m, width=5 * u.m, prefix="hillas_2" ) - with tempfile.NamedTemporaryFile() as f: - with HDF5TableWriter(f.name, group_name="dl1", add_prefix=True) as writer: - writer.write("params", [hillas_config_1, hillas_config_2]) + with HDF5TableWriter(path, group_name="dl1", add_prefix=True) as writer: + writer.write("params", [hillas_config_1, hillas_config_2]) - df = pd.read_hdf(f.name, key="/dl1/params") - assert "hillas_1_x" in df.columns - assert "hillas_2_x" in df.columns + df = pd.read_hdf(path, key="/dl1/params") + assert "hillas_1_x" in df.columns + assert "hillas_2_x" in df.columns - with HDF5TableReader(f.name) as reader: - generator = reader.read( - "/dl1/params", - [HillasParametersContainer(), HillasParametersContainer()], - prefixes=["hillas_1", "hillas_2"], - ) - hillas_1, hillas_2 = next(generator) + with HDF5TableReader(path) as reader: + generator = reader.read( + "/dl1/params", + [HillasParametersContainer(), HillasParametersContainer()], + prefixes=["hillas_1", "hillas_2"], + ) + hillas_1, hillas_2 = next(generator) - for value, read_value in zip( - hillas_config_1.as_dict().values(), hillas_1.as_dict().values() - ): - np.testing.assert_equal(value, read_value) + for value, read_value in zip( + hillas_config_1.as_dict().values(), hillas_1.as_dict().values() + ): + np.testing.assert_equal(value, read_value) - for value, read_value in zip( - hillas_config_2.as_dict().values(), hillas_2.as_dict().values() - ): - np.testing.assert_equal(value, read_value) + for value, read_value in zip( + hillas_config_2.as_dict().values(), hillas_2.as_dict().values() + ): + np.testing.assert_equal(value, read_value) -def test_custom_prefix(): +def test_custom_prefix(tmp_path): + path = tmp_path / "test.h5" + container = HillasParametersContainer( x=1 * u.m, y=1 * u.m, length=1 * u.m, width=1 * u.m ) container.prefix = "custom" - with tempfile.NamedTemporaryFile() as f: - with HDF5TableWriter(f.name, group_name="dl1", add_prefix=True) as writer: - writer.write("params", container) - - with HDF5TableReader(f.name) as reader: - generator = reader.read( - "/dl1/params", HillasParametersContainer(), prefixes="custom" - ) - read_container = next(generator) - assert isinstance(read_container, HillasParametersContainer) - for value, read_value in zip( - container.as_dict().values(), read_container.as_dict().values() - ): - np.testing.assert_equal(value, read_value) - - -def test_units(): + with HDF5TableWriter(path, group_name="dl1", add_prefix=True) as writer: + writer.write("params", container) + + with 
HDF5TableReader(path) as reader: + generator = reader.read( + "/dl1/params", HillasParametersContainer(), prefixes="custom" + ) + read_container = next(generator) + assert isinstance(read_container, HillasParametersContainer) + for value, read_value in zip( + container.as_dict().values(), read_container.as_dict().values() + ): + np.testing.assert_equal(value, read_value) + + +def test_units(tmp_path): + path = tmp_path / "test.h5" + class WithUnits(Container): inverse_length = Field(5 / u.m, "foo") time = Field(1 * u.s, "bar", unit=u.s) @@ -252,18 +253,16 @@ class WithUnits(Container): c = WithUnits() - with tempfile.NamedTemporaryFile() as f: - with HDF5TableWriter(f.name, "data") as writer: - writer.write("units", c) - - with tables.open_file(f.name, "r") as f: + with HDF5TableWriter(path, "data") as writer: + writer.write("units", c) - assert f.root.data.units.attrs["inverse_length_UNIT"] == "m-1" - assert f.root.data.units.attrs["time_UNIT"] == "s" - assert f.root.data.units.attrs["grammage_UNIT"] == "cm-2 g" + with tables.open_file(path, "r") as f: + assert f.root.data.units.attrs["inverse_length_UNIT"] == "m-1" + assert f.root.data.units.attrs["time_UNIT"] == "s" + assert f.root.data.units.attrs["grammage_UNIT"] == "cm-2 g" -def test_write_containers(temp_h5_file): +def test_write_containers(tmp_path): class C1(Container): a = Field(None, "a") b = Field(None, "b") @@ -272,62 +271,61 @@ class C2(Container): c = Field(None, "c") d = Field(None, "d") - with tempfile.NamedTemporaryFile() as f: - with HDF5TableWriter(f.name, "test") as writer: - for i in range(20): - c1 = C1() - c2 = C2() - c1.a, c1.b, c2.c, c2.d = np.random.normal(size=4) - c1.b = np.random.normal() + with HDF5TableWriter(tmp_path / "test.h5", "test") as writer: + for _ in range(20): + c1 = C1() + c2 = C2() + c1.a, c1.b, c2.c, c2.d = np.random.normal(size=4) + writer.write("tel_001", [c1, c2]) - writer.write("tel_001", [c1, c2]) +def test_write_bool(tmp_path): + path = tmp_path / "test.h5" -def test_write_bool(): class C(Container): boolean = Field(True, "Boolean value") - with tempfile.NamedTemporaryFile() as f: - with HDF5TableWriter(f.name, "test") as writer: - for i in range(2): - c = C(boolean=(i % 2 == 0)) - writer.write("c", c) + with HDF5TableWriter(path, "test") as writer: + for i in range(2): + c = C(boolean=(i % 2 == 0)) + writer.write("c", c) - c = C() - with HDF5TableReader(f.name) as reader: - c_reader = reader.read("/test/c", c) - for i in range(2): - cur = next(c_reader) - expected = (i % 2) == 0 - assert isinstance(cur.boolean, np.bool_) - assert cur.boolean == expected + c = C() + with HDF5TableReader(path) as reader: + c_reader = reader.read("/test/c", c) + for i in range(2): + cur = next(c_reader) + expected = (i % 2) == 0 + assert isinstance(cur.boolean, np.bool_) + assert cur.boolean == expected -def test_write_large_integer(): +def test_write_large_integer(tmp_path): + path = tmp_path / "test.h5" + class C(Container): value = Field(True, "Integer value") exps = [15, 31, 63] - with tempfile.NamedTemporaryFile() as f: - with HDF5TableWriter(f.name, "test") as writer: - for exp in exps: - c = C(value=2 ** exp - 1) - writer.write("c", c) + with HDF5TableWriter(path, "test") as writer: + for exp in exps: + c = C(value=2 ** exp - 1) + writer.write("c", c) - c = C() - with HDF5TableReader(f.name) as reader: - c_reader = reader.read("/test/c", c) - for exp in exps: - cur = next(c_reader) - assert cur.value == 2 ** exp - 1 + c = C() + with HDF5TableReader(path) as reader: + c_reader = reader.read("/test/c", 
c) + for exp in exps: + cur = next(c_reader) + assert cur.value == 2 ** exp - 1 -def test_read_container(temp_h5_file): +def test_read_container(test_h5_file): r0tel1 = R0CameraContainer() r0tel2 = R0CameraContainer() sim_shower = SimulatedShowerContainer() - with HDF5TableReader(temp_h5_file) as reader: + with HDF5TableReader(test_h5_file) as reader: # get the generators for each table # test supplying a single container as well as an @@ -337,7 +335,7 @@ def test_read_container(temp_h5_file): r0tab2 = reader.read("/R0/tel_002", r0tel2) # read all 3 tables in sync - for ii in range(3): + for _ in range(3): m = next(simtab)[0] r0_1 = next(r0tab1) @@ -352,56 +350,51 @@ def test_read_container(temp_h5_file): assert r0_1.meta["date"] == "2020-10-10" -def test_read_whole_table(temp_h5_file): - +def test_read_whole_table(test_h5_file): sim_shower = SimulatedShowerContainer() - with HDF5TableReader(temp_h5_file) as reader: - + with HDF5TableReader(test_h5_file) as reader: for cont in reader.read("/R0/sim_shower", sim_shower): print(cont) -def test_with_context_writer(temp_h5_file): +def test_with_context_writer(tmp_path): + path = tmp_path / "test.h5" + class C1(Container): a = Field("a", None) b = Field("b", None) - with tempfile.NamedTemporaryFile() as f: - - with HDF5TableWriter(f.name, "test") as h5_table: + with HDF5TableWriter(path, "test") as h5_table: - for i in range(5): - c1 = C1() - c1.a, c1.b = np.random.normal(size=2) + for i in range(5): + c1 = C1() + c1.a, c1.b = np.random.normal(size=2) - h5_table.write("tel_001", c1) + h5_table.write("tel_001", c1) -def test_writer_closes_file(temp_h5_file): +def test_writer_closes_file(tmp_path): - with tempfile.NamedTemporaryFile() as f: - with HDF5TableWriter(f.name, "test") as h5_table: - - assert h5_table.h5file.isopen == 1 + with HDF5TableWriter(tmp_path / "test.h5", "test") as h5_table: + assert h5_table.h5file.isopen == 1 assert h5_table.h5file.isopen == 0 -def test_reader_closes_file(temp_h5_file): - - with HDF5TableReader(temp_h5_file) as h5_table: +def test_reader_closes_file(test_h5_file): + with HDF5TableReader(test_h5_file) as h5_table: assert h5_table._h5file.isopen == 1 assert h5_table._h5file.isopen == 0 -def test_with_context_reader(temp_h5_file): +def test_with_context_reader(test_h5_file): sim_shower = SimulatedShowerContainer() - with HDF5TableReader(temp_h5_file) as h5_table: + with HDF5TableReader(test_h5_file) as h5_table: assert h5_table._h5file.isopen == 1 @@ -411,107 +404,89 @@ def test_with_context_reader(temp_h5_file): assert h5_table._h5file.isopen == 0 -def test_closing_reader(temp_h5_file): +def test_closing_reader(test_h5_file): - f = HDF5TableReader(temp_h5_file) + f = HDF5TableReader(test_h5_file) f.close() assert f._h5file.isopen == 0 -def test_closing_writer(temp_h5_file): - - with tempfile.NamedTemporaryFile() as f: - h5_table = HDF5TableWriter(f.name, "test") - h5_table.close() +def test_closing_writer(tmp_path): - assert h5_table.h5file.isopen == 0 + h5_table = HDF5TableWriter(tmp_path / "test.h5", "test") + h5_table.close() + assert h5_table.h5file.isopen == 0 -def test_cannot_read_with_writer(temp_h5_file): +def test_cannot_read_with_writer(tmp_path): with pytest.raises(IOError): - - with HDF5TableWriter(temp_h5_file, "test", mode="r"): + with HDF5TableWriter(tmp_path / "test.h5", "test", mode="r"): pass -def test_cannot_write_with_reader(temp_h5_file): - - with HDF5TableReader(temp_h5_file, mode="w") as h5: +def test_cannot_write_with_reader(test_h5_file): + with HDF5TableReader(test_h5_file, mode="w") 
as h5: assert h5._h5file.mode == "r" -def test_cannot_append_with_reader(temp_h5_file): - - with HDF5TableReader(temp_h5_file, mode="a") as h5: +def test_cannot_append_with_reader(test_h5_file): + with HDF5TableReader(test_h5_file, mode="a") as h5: assert h5._h5file.mode == "r" -def test_cannot_r_plus_with_reader(temp_h5_file): - - with HDF5TableReader(temp_h5_file, mode="r+") as h5: +def test_cannot_r_plus_with_reader(test_h5_file): + with HDF5TableReader(test_h5_file, mode="r+") as h5: assert h5._h5file.mode == "r" -def test_append_mode(temp_h5_file): - class ContainerA(Container): +def test_append_mode(tmp_path): + path = tmp_path / "test.h5" + class ContainerA(Container): a = Field(int) - a = ContainerA() - a.a = 1 + a = ContainerA(a=1) # First open with 'w' mode to clear the file and add a Container - with HDF5TableWriter(temp_h5_file, "group") as h5: - + with HDF5TableWriter(path, "group") as h5: h5.write("table_1", a) # Try to append A again - with HDF5TableWriter(temp_h5_file, "group", mode="a") as h5: - + with HDF5TableWriter(path, "group", mode="a") as h5: h5.write("table_2", a) # Check if file has two tables with a = 1 - with HDF5TableReader(temp_h5_file) as h5: + with HDF5TableReader(path) as h5: for a in h5.read("/group/table_1", ContainerA()): - assert a.a == 1 for a in h5.read("/group/table_2", ContainerA()): - assert a.a == 1 -def test_write_to_any_location(temp_h5_file): - +def test_write_to_any_location(tmp_path): + path = tmp_path / "test.h5" loc = "path/path_1" class ContainerA(Container): - a = Field(0, "some integer field") - a = ContainerA() - a.a = 1 - - with HDF5TableWriter(temp_h5_file, group_name=loc + "/group_1") as h5: + a = ContainerA(a=1) + with HDF5TableWriter(path, group_name=loc + "/group_1") as h5: for _ in range(5): - h5.write("table", a) h5.write("deeper/table2", a) - with HDF5TableReader(temp_h5_file) as h5: - + with HDF5TableReader(path) as h5: for a in h5.read("/" + loc + "/group_1/table", ContainerA()): - assert a.a == 1 - with HDF5TableReader(temp_h5_file) as h5: - + with HDF5TableReader(path) as h5: for a in h5.read("/" + loc + "/group_1/deeper/table2", ContainerA()): - assert a.a == 1 @@ -715,39 +690,56 @@ class TimeContainer(Container): assert (data.time - time).to(u.s).value < 1e-7 -def test_filters(): +def test_filters(tmp_path): from tables import Filters, open_file + path = tmp_path / "test_time.hdf5" + class TestContainer(Container): value = Field(-1, "test") no_comp = Filters(complevel=0) zstd = Filters(complevel=5, complib="blosc:zstd") - with tempfile.NamedTemporaryFile(suffix=".hdf5") as f: - with HDF5TableWriter( - f.name, group_name="data", mode="w", filters=no_comp - ) as writer: - assert writer.h5file.filters.complevel == 0 + with HDF5TableWriter(path, group_name="data", mode="w", filters=no_comp) as writer: + assert writer.h5file.filters.complevel == 0 - c = TestContainer(value=5) - writer.write("default", c) + c = TestContainer(value=5) + writer.write("default", c) - writer.filters = zstd - writer.write("zstd", c) + writer.filters = zstd + writer.write("zstd", c) - writer.filters = no_comp - writer.write("nocomp", c) + writer.filters = no_comp + writer.write("nocomp", c) - with open_file(f.name) as h5file: - assert h5file.root.data.default.filters.complevel == 0 - assert h5file.root.data.zstd.filters.complevel == 5 - assert h5file.root.data.zstd.filters.complib == "blosc:zstd" - assert h5file.root.data.nocomp.filters.complevel == 0 + with open_file(path) as h5file: + assert h5file.root.data.default.filters.complevel == 0 + assert 
h5file.root.data.zstd.filters.complevel == 5 + assert h5file.root.data.zstd.filters.complib == "blosc:zstd" + assert h5file.root.data.nocomp.filters.complevel == 0 -def test_column_order(): +def test_column_order_single_container(tmp_path): """ Test that columns are written in the order the containers define them""" + path = tmp_path / "test.h5" + + class Container1(Container): + b = Field(1, "b") + a = Field(2, "a") + + # test with single container + with HDF5TableWriter(path, mode="w") as writer: + c = Container1() + writer.write("foo", c) + + with tables.open_file(path, "r") as f: + assert f.root.foo[:].dtype.names == ("b", "a") + + +def test_column_order_multiple_containers(tmp_path): + """ Test that columns are written in the order the containers define them""" + path = tmp_path / "test.h5" class Container1(Container): b = Field(1, "b") @@ -757,36 +749,27 @@ class Container2(Container): d = Field(3, "d") c = Field(4, "c") - # test with single container - with tempfile.NamedTemporaryFile(suffix=".hdf5") as f: - with HDF5TableWriter(f.name, mode="w") as writer: - c = Container1() - writer.write("foo", c) - - with tables.open_file(f.name, "r") as f: - assert f.root.foo[:].dtype.names == ("b", "a") - # test with two containers - with tempfile.NamedTemporaryFile(suffix=".hdf5") as f: - with HDF5TableWriter(f.name, mode="w") as writer: - c1 = Container1() - c2 = Container2() - writer.write("foo", [c2, c1]) - writer.write("bar", [c1, c2]) + with HDF5TableWriter(path, mode="w") as writer: + c1 = Container1() + c2 = Container2() + writer.write("foo", [c2, c1]) + writer.write("bar", [c1, c2]) - with tables.open_file(f.name, "r") as f: - assert f.root.foo[:].dtype.names == ("d", "c", "b", "a") - assert f.root.bar[:].dtype.names == ("b", "a", "d", "c") + with tables.open_file(path, "r") as f: + assert f.root.foo[:].dtype.names == ("d", "c", "b", "a") + assert f.root.bar[:].dtype.names == ("b", "a", "d", "c") -def test_writing_nan_defaults(): +def test_writing_nan_defaults(tmp_path): from ctapipe.containers import ImageParametersContainer + path = tmp_path / "test.h5" + params = ImageParametersContainer() - with tempfile.NamedTemporaryFile(suffix=".hdf5") as f: - with HDF5TableWriter(f.name, mode="w") as writer: - writer.write("params", params.values()) + with HDF5TableWriter(path, mode="w") as writer: + writer.write("params", params.values()) ALL_CONTAINERS = [] @@ -800,27 +783,16 @@ def test_writing_nan_defaults(): @pytest.mark.parametrize("cls", ALL_CONTAINERS) -def test_write_default_container(cls): - - with tempfile.NamedTemporaryFile(suffix=".hdf5") as f: - with HDF5TableWriter(f.name, mode="w") as writer: - try: - writer.write("params", cls()) - except ValueError as e: - # some containers do not have writable members, - # only subcontainers. For now, ignore them. - if "cannot create an empty data type" in str(e): - pytest.xfail() - else: - raise +def test_write_default_container(cls, tmp_path): + path = tmp_path / "test.h5" - -if __name__ == "__main__": - - import logging - - logging.basicConfig(level=logging.DEBUG) - - test_write_container("test.h5") - test_read_container("test.h5") - test_read_whole_table("test.h5") + with HDF5TableWriter(path, mode="w") as writer: + try: + writer.write("params", cls()) + except ValueError as e: + # some containers do not have writable members, + # only subcontainers. For now, ignore them. 
+ if "cannot create an empty data type" in str(e): + pytest.xfail() + else: + raise diff --git a/ctapipe/tools/camdemo.py b/ctapipe/tools/camdemo.py index c72408cdfbe..80f8cb994af 100644 --- a/ctapipe/tools/camdemo.py +++ b/ctapipe/tools/camdemo.py @@ -18,11 +18,7 @@ hillas_parameters, HillasParameterizationError, ) -from ctapipe.instrument import ( - TelescopeDescription, - OpticsDescription, - CameraDescription, -) +from ctapipe.instrument import TelescopeDescription, OpticsDescription from ctapipe.visualization import CameraDisplay @@ -43,10 +39,8 @@ class CameraDemo(Tool): "much faster but may cause some draw " "artifacts)", ).tag(config=True) - camera = traits.CaselessStrEnum( - CameraDescription.get_known_camera_names(), - default_value="NectarCam", - help="Name of camera to display", + camera = traits.Unicode( + default_value="NectarCam", help="Name of camera to display" ).tag(config=True) optics = traits.CaselessStrEnum( diff --git a/ctapipe/tools/tests/test_tools.py b/ctapipe/tools/tests/test_tools.py index 4b73cd02e68..41f4ee2dfad 100644 --- a/ctapipe/tools/tests/test_tools.py +++ b/ctapipe/tools/tests/test_tools.py @@ -8,7 +8,6 @@ import matplotlib as mpl -import tempfile import pandas as pd import tables @@ -19,66 +18,73 @@ from pathlib import Path -tmp_dir = tempfile.TemporaryDirectory() GAMMA_TEST_LARGE = get_dataset_path("gamma_test_large.simtel.gz") LST_MUONS = get_dataset_path("lst_muons.simtel.zst") @pytest.fixture(scope="module") -def dl1_image_file(): +def dl1_tmp_path(tmp_path_factory): + return tmp_path_factory.mktemp("dl1") + + +@pytest.fixture(scope="module") +def dl1_image_file(dl1_tmp_path): """ DL1 file containing only images (DL1A) from a gamma simulation set. """ - command = ( - "ctapipe-stage1 " - f"--input {GAMMA_TEST_LARGE} " - f"--output {tmp_dir.name}/images.dl1.h5 " - "--write-images " - "--max-events 20 " - "--allowed-tels=[1,2,3]" - ) - subprocess.call(command.split(), stdout=subprocess.PIPE) - return f"{tmp_dir.name}/images.dl1.h5" + output = dl1_tmp_path / "images.dl1.h5" + command = [ + "ctapipe-stage1", + f"--input={GAMMA_TEST_LARGE}", + f"--output={output}", + "--write-images", + "--max-events=20", + "--allowed-tels=[1,2,3]", + ] + subprocess.run(command, stdout=subprocess.PIPE, check=True) + return output @pytest.fixture(scope="module") -def dl1_parameters_file(): +def dl1_parameters_file(dl1_tmp_path): """ DL1 File containing only parameters (DL1B) from a gamma simulation set. """ - command = ( - "ctapipe-stage1 " - f"--input {GAMMA_TEST_LARGE} " - f"--output {tmp_dir.name}/parameters.dl1.h5 " - "--write-parameters " - "--max-events 20 " - "--allowed-tels=[1,2,3]" - ) - subprocess.call(command.split(), stdout=subprocess.PIPE) - return f"{tmp_dir.name}/parameters.dl1.h5" + output = dl1_tmp_path / "parameters.dl1.h5" + command = [ + "ctapipe-stage1", + f"--input={GAMMA_TEST_LARGE}", + f"--output={output}", + "--write-parameters", + "--max-events=20", + "--allowed-tels=[1,2,3]", + ] + subprocess.run(command, stdout=subprocess.PIPE, check=True) + return output @pytest.fixture(scope="module") -def dl1_muon_file(): +def dl1_muon_file(dl1_tmp_path): """ DL1 file containing only images from a muon simulation set. 
""" - command = ( - "ctapipe-stage1 " - f"--input {LST_MUONS} " - f"--output {tmp_dir.name}/muons.dl1.h5 " - "--write-images" - ) - subprocess.call(command.split(), stdout=subprocess.PIPE) - return f"{tmp_dir.name}/muons.dl1.h5" - - -def test_stage_1_dl1(tmpdir, dl1_image_file, dl1_parameters_file): + output = dl1_tmp_path / "muons.dl1.h5" + command = [ + "ctapipe-stage1", + f"--input={LST_MUONS}", + f"--output={output}", + "--write-images", + ] + subprocess.run(command, stdout=subprocess.PIPE, check=True) + return output + + +def test_stage_1_dl1(tmp_path, dl1_image_file, dl1_parameters_file): from ctapipe.tools.stage1 import Stage1Tool config = Path("./examples/stage1_config.json").absolute() # DL1A file as input - dl1b_from_dl1a_file = tmp_dir.name + "/dl1b_from dl1a.dl1.h5" + dl1b_from_dl1a_file = tmp_path / "dl1b_from dl1a.dl1.h5" assert ( run_tool( Stage1Tool(), @@ -89,7 +95,7 @@ def test_stage_1_dl1(tmpdir, dl1_image_file, dl1_parameters_file): "--write-parameters", "--overwrite", ], - cwd=tmpdir, + cwd=tmp_path, ) == 0 ) @@ -134,17 +140,17 @@ def test_stage_1_dl1(tmpdir, dl1_image_file, dl1_parameters_file): argv=[ f"--config={config}", f"--input={dl1_parameters_file}", - f"--output={tmp_dir.name + '/dl1b_from_dl1b.dl1.h5'}", + f"--output={tmp_path}/dl1b_from_dl1b.dl1.h5", "--write-parameters", "--overwrite", ], - cwd=tmpdir, + cwd=tmp_path, ) == 1 ) -def test_stage1_datalevels(tmpdir): +def test_stage1_datalevels(tmp_path): """test the dl1 tool on a file not providing r1, dl0 or dl1a""" from ctapipe.io import EventSource from ctapipe.tools.stage1 import Stage1Tool @@ -175,8 +181,8 @@ def subarray(self): def _generator(self): return None - dummy_file = tmp_dir.name + "/datalevels_dummy.h5" - out_file = tmp_dir.name + "/datalevels_dummy_stage1_output.h5" + dummy_file = tmp_path / "datalevels_dummy.h5" + out_file = tmp_path / "datalevels_dummy_stage1_output.h5" with open(dummy_file, "wb") as f: f.write(b"dummy") f.flush() @@ -194,7 +200,7 @@ def _generator(self): "--write-images", "--overwrite", ], - cwd=tmpdir, + cwd=tmp_path, ) == 1 ) @@ -202,10 +208,10 @@ def _generator(self): assert isinstance(tool.event_source, DummyEventSource) -def test_muon_reconstruction(tmpdir, dl1_muon_file): +def test_muon_reconstruction(tmp_path, dl1_muon_file): from ctapipe.tools.muon_reconstruction import MuonAnalysis - muon_simtel_output_file = tmp_dir.name + "/muon_reco_on_simtel.h5" + muon_simtel_output_file = tmp_path / "muon_reco_on_simtel.h5" assert ( run_tool( MuonAnalysis(), @@ -214,7 +220,7 @@ def test_muon_reconstruction(tmpdir, dl1_muon_file): f"--output={muon_simtel_output_file}", "--overwrite", ], - cwd=tmpdir, + cwd=tmp_path, ) == 0 ) @@ -224,7 +230,7 @@ def test_muon_reconstruction(tmpdir, dl1_muon_file): assert len(table) > 20 assert np.count_nonzero(np.isnan(table["muonring_radius"])) == 0 - muon_dl1_output_file = tmp_dir.name + "/muon_reco_on_dl1a.h5" + muon_dl1_output_file = tmp_path / "muon_reco_on_dl1a.h5" assert ( run_tool( MuonAnalysis(), @@ -233,7 +239,7 @@ def test_muon_reconstruction(tmpdir, dl1_muon_file): f"--output={muon_dl1_output_file}", "--overwrite", ], - cwd=tmpdir, + cwd=tmp_path, ) == 0 ) @@ -246,7 +252,7 @@ def test_muon_reconstruction(tmpdir, dl1_muon_file): assert run_tool(MuonAnalysis(), ["--help-all"]) == 0 -def test_display_summed_images(tmpdir): +def test_display_summed_images(tmp_path): from ctapipe.tools.display_summed_images import ImageSumDisplayerTool mpl.use("Agg") @@ -254,7 +260,7 @@ def test_display_summed_images(tmpdir): run_tool( 
ImageSumDisplayerTool(), argv=shlex.split(f"--infile={GAMMA_TEST_LARGE} " "--max-events=2 "), - cwd=tmpdir, + cwd=tmp_path, ) == 0 ) @@ -262,7 +268,7 @@ def test_display_summed_images(tmpdir): assert run_tool(ImageSumDisplayerTool(), ["--help-all"]) == 0 -def test_display_integrator(tmpdir): +def test_display_integrator(tmp_path): from ctapipe.tools.display_integrator import DisplayIntegrator mpl.use("Agg") @@ -271,7 +277,7 @@ def test_display_integrator(tmpdir): run_tool( DisplayIntegrator(), argv=shlex.split(f"--f={GAMMA_TEST_LARGE} " "--max_events=1 "), - cwd=tmpdir, + cwd=tmp_path, ) == 0 ) @@ -279,7 +285,7 @@ def test_display_integrator(tmpdir): assert run_tool(DisplayIntegrator(), ["--help-all"]) == 0 -def test_display_events_single_tel(tmpdir): +def test_display_events_single_tel(tmp_path): from ctapipe.tools.display_events_single_tel import SingleTelEventDisplay mpl.use("Agg") @@ -292,7 +298,7 @@ def test_display_events_single_tel(tmpdir): "--tel=11 " "--max-events=2 " # <--- inconsistent!!! ), - cwd=tmpdir, + cwd=tmp_path, ) == 0 ) @@ -300,7 +306,7 @@ def test_display_events_single_tel(tmpdir): assert run_tool(SingleTelEventDisplay(), ["--help-all"]) == 0 -def test_display_dl1(tmpdir, dl1_image_file, dl1_parameters_file): +def test_display_dl1(tmp_path, dl1_image_file, dl1_parameters_file): from ctapipe.tools.display_dl1 import DisplayDL1Calib mpl.use("Agg") @@ -310,7 +316,7 @@ def test_display_dl1(tmpdir, dl1_image_file, dl1_parameters_file): run_tool( DisplayDL1Calib(), argv=shlex.split("--max_events=1 " "--telescope=11 "), - cwd=tmpdir, + cwd=tmp_path, ) == 0 ) @@ -343,46 +349,44 @@ def test_info(): info(show_all=True) -def test_dump_triggers(tmpdir): +def test_dump_triggers(tmp_path): from ctapipe.tools.dump_triggers import DumpTriggersTool sys.argv = ["dump_triggers"] - outfile = tmpdir.join("triggers.fits") + outfile = tmp_path / "triggers.fits" tool = DumpTriggersTool(infile=GAMMA_TEST_LARGE, outfile=str(outfile)) - assert run_tool(tool, cwd=tmpdir) == 0 + assert run_tool(tool, cwd=tmp_path) == 0 assert outfile.exists() assert run_tool(tool, ["--help-all"]) == 0 -def test_dump_instrument(tmpdir): +def test_dump_instrument(tmp_path): from ctapipe.tools.dump_instrument import DumpInstrumentTool sys.argv = ["dump_instrument"] - tmpdir.chdir() - tool = DumpInstrumentTool() - assert run_tool(tool, [f"--input={GAMMA_TEST_LARGE}"], cwd=tmpdir) == 0 - assert tmpdir.join("FlashCam.camgeom.fits.gz").exists() + assert run_tool(tool, [f"--input={GAMMA_TEST_LARGE}"], cwd=tmp_path) == 0 + assert (tmp_path / "FlashCam.camgeom.fits.gz").exists() assert ( - run_tool(tool, [f"--input={GAMMA_TEST_LARGE}", "--format=ecsv"], cwd=tmpdir) + run_tool(tool, [f"--input={GAMMA_TEST_LARGE}", "--format=ecsv"], cwd=tmp_path) == 0 ) - assert tmpdir.join("MonteCarloArray.optics.ecsv.txt").exists() + assert (tmp_path / "MonteCarloArray.optics.ecsv.txt").exists() assert ( - run_tool(tool, [f"--input={GAMMA_TEST_LARGE}", "--format=hdf5"], cwd=tmpdir) + run_tool(tool, [f"--input={GAMMA_TEST_LARGE}", "--format=hdf5"], cwd=tmp_path) == 0 ) - assert tmpdir.join("subarray.h5").exists() + assert (tmp_path / "subarray.h5").exists() - assert run_tool(tool, ["--help-all"], cwd=tmpdir) == 0 + assert run_tool(tool, ["--help-all"], cwd=tmp_path) == 0 -def test_camdemo(tmpdir, camera_geometries): +def test_camdemo(tmp_path): from ctapipe.tools.camdemo import CameraDemo sys.argv = ["camera_demo"] @@ -391,15 +395,15 @@ def test_camdemo(tmpdir, camera_geometries): tool.cleanframes = 2 tool.display = False - assert 
run_tool(tool, cwd=tmpdir) == 0
+    assert run_tool(tool, cwd=tmp_path) == 0
     assert run_tool(tool, ["--help-all"]) == 0
 
 
-def test_bokeh_file_viewer(tmpdir):
+def test_bokeh_file_viewer(tmp_path):
     from ctapipe.tools.bokeh.file_viewer import BokehFileViewer
 
     sys.argv = ["bokeh_file_viewer"]
     tool = BokehFileViewer(disable_server=True)
-    assert run_tool(tool, cwd=tmpdir) == 0
+    assert run_tool(tool, cwd=tmp_path) == 0
     assert tool.reader.input_url == get_dataset_path("gamma_test_large.simtel.gz")
     assert run_tool(tool, ["--help-all"]) == 0
diff --git a/ctapipe/utils/download.py b/ctapipe/utils/download.py
index fb4dd138868..9fb59fa8ff7 100644
--- a/ctapipe/utils/download.py
+++ b/ctapipe/utils/download.py
@@ -5,6 +5,7 @@
 from tqdm.auto import tqdm
 from urllib.parse import urlparse
 import time
+from contextlib import contextmanager
 
 __all__ = ["download_file", "download_cached", "download_file_cached"]
 
@@ -76,38 +77,50 @@ def get_cache_path(url, cache_name="ctapipe", env_override="CTAPIPE_CACHE"):
     return path
 
 
+@contextmanager
+def file_lock(path):
+    # if the file already exists, we wait until it does not exist anymore
+    if path.is_file():
+        log.warning("Another download for this file is already running, waiting.")
+        while path.is_file():
+            time.sleep(0.1)
+
+    # create the lock file to mark this download as in progress
+    path.open("w").close()
+    try:
+        yield
+    finally:
+        path.unlink()
+
+
 def download_cached(
     url, cache_name="ctapipe", auth=None, env_prefix="CTAPIPE_DATA_", progress=False
 ):
     path = get_cache_path(url, cache_name=cache_name)
     path.parent.mkdir(parents=True, exist_ok=True)
-    part_file = path.with_suffix(path.suffix + ".part")
-
-    if part_file.is_file():
-        log.warning("Another download for this file is already running, waiting.")
-        while part_file.is_file():
-            time.sleep(1)
-
-    # if we already dowloaded the file, just use it
-    if path.is_file():
-        log.debug(f"{url} is available in cache.")
+    lock_file = path.with_suffix(path.suffix + ".lock")
+
+    with file_lock(lock_file):
+        # if we already downloaded the file, just use it
+        if path.is_file():
+            log.debug(f"{url} is available in cache.")
+            return path
+
+        if auth is True:
+            try:
+                auth = (
+                    os.environ[env_prefix + "USER"],
+                    os.environ[env_prefix + "PASSWORD"],
+                )
+            except KeyError:
+                raise KeyError(
+                    f'You need to set the env variables "{env_prefix}USER"'
+                    f' and "{env_prefix}PASSWORD" to download test files.'
+                ) from None
+
+        download_file(url=url, path=path, auth=auth, progress=progress)
        return path
 
-    if auth is True:
-        try:
-            auth = (
-                os.environ[env_prefix + "USER"],
-                os.environ[env_prefix + "PASSWORD"],
-            )
-        except KeyError:
-            raise KeyError(
-                f'You need to set the env variables "{env_prefix}USER"'
-                f' and "{env_prefix}PASSWORD" to download test files.'
-            ) from None
-
-    download_file(url=url, path=path, auth=auth, progress=progress)
-    return path
-
 
 def download_file_cached(
     name,