diff --git a/ctapipe/conftest.py b/ctapipe/conftest.py
index d4e8959747f..cf6a7b22956 100644
--- a/ctapipe/conftest.py
+++ b/ctapipe/conftest.py
@@ -11,6 +11,15 @@
from ctapipe.utils import get_dataset_path
from ctapipe.utils.filelock import FileLock
+from pytest_astropy_header.display import PYTEST_HEADER_MODULES
+
+PYTEST_HEADER_MODULES.clear()
+PYTEST_HEADER_MODULES["eventio"] = "eventio"
+PYTEST_HEADER_MODULES["numpy"] = "numpy"
+PYTEST_HEADER_MODULES["scipy"] = "scipy"
+PYTEST_HEADER_MODULES["astropy"] = "astropy"
+PYTEST_HEADER_MODULES["numba"] = "numba"
+
# names of camera geometries available on the data server
camera_names = [
"ASTRICam",
@@ -44,7 +53,8 @@ def _global_example_event():
print("******************** LOAD TEST EVENT ***********************")
- with SimTelEventSource(input_url=filename) as reader:
+ # FIXME: switch to prod5b+ file that contains effective focal length
+ with SimTelEventSource(input_url=filename, focal_length_choice="nominal") as reader:
event = next(iter(reader))
return event
@@ -59,7 +69,7 @@ def example_subarray():
print("******************** LOAD TEST EVENT ***********************")
- with SimTelEventSource(input_url=filename) as reader:
+ with SimTelEventSource(input_url=filename, focal_length_choice="nominal") as reader:
return reader.subarray
@@ -84,7 +94,7 @@ def _subarray_and_event_gamma_off_axis_500_gev():
path = get_dataset_path("lst_prod3_calibration_and_mcphotons.simtel.zst")
- with SimTelEventSource(path) as source:
+ with SimTelEventSource(path, focal_length_choice="nominal") as source:
it = iter(source)
# we want the second event, first event is a corner clipper
next(it)
@@ -353,6 +363,7 @@ def dl1_muon_file(dl1_tmp_path):
"--write-dl1-images",
"--DataWriter.write_dl1_parameters=False",
"--DataWriter.Contact.name=αℓℓ the äüöß",
+ "--SimTelEventSource.focal_length_choice=nominal",
]
assert run_tool(ProcessorTool(), argv=argv, cwd=dl1_tmp_path) == 0
return output
diff --git a/ctapipe/coordinates/__init__.py b/ctapipe/coordinates/__init__.py
index 905d38098bd..eeedb1fd296 100644
--- a/ctapipe/coordinates/__init__.py
+++ b/ctapipe/coordinates/__init__.py
@@ -14,7 +14,12 @@
import warnings
from .telescope_frame import TelescopeFrame
from .nominal_frame import NominalFrame
-from .ground_frames import GroundFrame, TiltedGroundFrame, project_to_ground
+from .ground_frames import (
+ GroundFrame,
+ TiltedGroundFrame,
+ project_to_ground,
+ EastingNorthingFrame,
+)
from .camera_frame import CameraFrame, EngineeringCameraFrame
@@ -25,6 +30,7 @@
"NominalFrame",
"GroundFrame",
"TiltedGroundFrame",
+ "EastingNorthingFrame",
"MissingFrameAttributeWarning",
"altaz_to_righthanded_cartesian",
"project_to_ground",
diff --git a/ctapipe/coordinates/ground_frames.py b/ctapipe/coordinates/ground_frames.py
index 7285607b8c4..8b19bb4dfba 100644
--- a/ctapipe/coordinates/ground_frames.py
+++ b/ctapipe/coordinates/ground_frames.py
@@ -14,21 +14,25 @@
- Tests Tests Tests!
"""
import astropy.units as u
+from astropy.units.quantity import Quantity
import numpy as np
from astropy.coordinates import (
+ AltAz,
BaseCoordinateFrame,
CartesianRepresentation,
- FunctionTransform,
CoordinateAttribute,
- AltAz,
+ FunctionTransform,
+ RepresentationMapping,
+ frame_transform_graph,
+ AffineTransform,
)
-from astropy.coordinates import frame_transform_graph
from numpy import cos, sin
__all__ = [
"GroundFrame",
"TiltedGroundFrame",
"project_to_ground",
+ "EastingNorthingFrame",
]
@@ -47,6 +51,25 @@ class GroundFrame(BaseCoordinateFrame):
default_representation = CartesianRepresentation
+class EastingNorthingFrame(BaseCoordinateFrame):
+ """GroundFrame but in standard Easting/Northing coordinates instead of
+ SimTel/Corsika conventions
+
+ Frame attributes: None
+
+ """
+
+ default_representation = CartesianRepresentation
+
+ frame_specific_representation_info = {
+ CartesianRepresentation: [
+ RepresentationMapping("x", "easting"),
+ RepresentationMapping("y", "northing"),
+ RepresentationMapping("z", "height"),
+ ]
+ }
+
+
class TiltedGroundFrame(BaseCoordinateFrame):
"""Tilted ground coordinate frame. The tilted ground coordinate frame
is a cartesian system describing the 2 dimensional projected
@@ -200,3 +223,34 @@ def project_to_ground(tilt_system):
y_projected = y_initial - trans[2][1] * z_initial / trans[2][2]
return GroundFrame(x=x_projected * unit, y=y_projected * unit, z=0 * unit)
+
+
+@frame_transform_graph.transform(FunctionTransform, GroundFrame, GroundFrame)
+def ground_to_ground(ground_coords, ground_frame):
+ """Null transform for going from ground to ground, since there are no
+    attributes of the GroundFrame"""
+ return ground_coords
+
+
+# Matrices for transforming between GroundFrame and EastingNorthingFrame
+NO_OFFSET = CartesianRepresentation(Quantity([0, 0, 0], u.m))
+GROUND_TO_EASTNORTH = np.asarray([[0, -1, 0], [1, 0, 0], [0, 0, 1]])
+
+
+@frame_transform_graph.transform(AffineTransform, GroundFrame, EastingNorthingFrame)
+def ground_to_easting_northing(ground_coords, eastnorth_frame):
+ """
+ convert GroundFrame points into eastings/northings for plotting purposes
+
+ """
+
+ return GROUND_TO_EASTNORTH, NO_OFFSET
+
+
+@frame_transform_graph.transform(AffineTransform, EastingNorthingFrame, GroundFrame)
+def easting_northing_to_ground(eastnorth_coords, ground_frame):
+ """
+ convert eastings/northings back to GroundFrame
+
+ """
+ return GROUND_TO_EASTNORTH.T, NO_OFFSET
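
For orientation, a minimal usage sketch of the frame and transforms added above (editor's illustration, not part of the patch). It relies only on the `ctapipe.coordinates` API shown in this hunk and on the CORSIKA/sim_telarray ground convention (x points North, y points West, z points up):

```python
import astropy.units as u
from astropy.coordinates import SkyCoord

from ctapipe.coordinates import EastingNorthingFrame, GroundFrame

# a telescope position in the CORSIKA/sim_telarray ground system
tel_pos = SkyCoord(x=100 * u.m, y=50 * u.m, z=10 * u.m, frame=GroundFrame())

# the same point in plotting-friendly easting/northing coordinates:
# easting = -y, northing = x, height = z (see GROUND_TO_EASTNORTH above)
east_north = tel_pos.transform_to(EastingNorthingFrame())
print(east_north.easting, east_north.northing, east_north.height)
# -50 m easting (i.e. 50 m west), 100 m northing, 10 m height
```
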
diff --git a/ctapipe/coordinates/tests/test_coordinates.py b/ctapipe/coordinates/tests/test_coordinates.py
index 12d2d82ff2a..cec9630da84 100644
--- a/ctapipe/coordinates/tests/test_coordinates.py
+++ b/ctapipe/coordinates/tests/test_coordinates.py
@@ -72,7 +72,7 @@ def test_telescope_separation():
tel1 = SkyCoord(fov_lon=0 * u.deg, fov_lat=0 * u.deg, frame=telescope_frame)
tel2 = SkyCoord(fov_lon=0 * u.deg, fov_lat=1 * u.deg, frame=telescope_frame)
- assert tel1.separation(tel2) == u.Quantity(1, u.deg)
+ assert u.isclose(tel1.separation(tel2), 1 * u.deg)
def test_separation_is_the_same():
@@ -267,6 +267,7 @@ def test_camera_focal_length_array():
def test_ground_frame_roundtrip():
+ """test transform from sky to ground roundtrip"""
from ctapipe.coordinates import GroundFrame, TiltedGroundFrame
normal = SkyCoord(alt=70 * u.deg, az=0 * u.deg, frame=AltAz())
@@ -275,6 +276,25 @@ def test_ground_frame_roundtrip():
back = tilted.transform_to(GroundFrame())
- assert u.isclose(coord.x, back.x)
- assert u.isclose(coord.y, back.y)
- assert u.isclose(coord.z, back.z)
+ assert u.isclose(coord.x, back.x, atol=1e-12 * u.m)
+ assert u.isclose(coord.y, back.y, atol=1e-12 * u.m)
+ assert u.isclose(coord.z, back.z, atol=1e-12 * u.m)
+
+
+def test_ground_to_eastnorth_roundtrip():
+ """Check Ground to EastingNorthing and the round-trip"""
+ from ctapipe.coordinates import GroundFrame, EastingNorthingFrame
+
+ ground = SkyCoord(
+ x=[1, 2, 3] * u.m, y=[-2, 5, 2] * u.m, z=[1, -1, 2] * u.m, frame=GroundFrame()
+ )
+ eastnorth = ground.transform_to(EastingNorthingFrame())
+ ground2 = eastnorth.transform_to(GroundFrame())
+
+ assert u.isclose(eastnorth.easting, [2, -5, -2] * u.m).all()
+ assert u.isclose(eastnorth.northing, [1, 2, 3] * u.m).all()
+ assert u.isclose(eastnorth.height, [1, -1, 2] * u.m).all()
+
+ assert u.isclose(ground.x, ground2.x).all()
+ assert u.isclose(ground.y, ground2.y).all()
+ assert u.isclose(ground.z, ground2.z).all()
\ No newline at end of file
diff --git a/ctapipe/core/component.py b/ctapipe/core/component.py
index c82cfbb1b82..3f0275fdaff 100644
--- a/ctapipe/core/component.py
+++ b/ctapipe/core/component.py
@@ -2,6 +2,8 @@
from abc import ABCMeta
from inspect import isabstract
from logging import getLogger
+from docutils.core import publish_parts
+from inspect import cleandoc
from traitlets import TraitError
from traitlets.config import Configurable
@@ -219,22 +221,45 @@ def _repr_html_(self):
"""nice HTML rep, with blue for non-default values"""
traits = self.traits()
name = self.__class__.__name__
+ docstring = (
+ publish_parts(cleandoc(self.__class__.__doc__), writer_name="html")[
+ "html_body"
+ ]
+ or "Undocumented"
+ )
         lines = [
+            "<div style='border:1px solid black; max-width: 700px;'>",
             f"<b>{name}</b>",
-            f"<p> {self.__class__.__doc__ or 'Undocumented!'} </p>",
+            f"<p> {docstring} </p>",
             "<table>",
+            "    <colgroup>",
+            "        <col span='1' style='width: 20%;'>",
+            "        <col span='1' style='width: 20%;'>",
+            "        <col span='1' style='width: 60%;'>",
+            "    </colgroup>",
+            "    <tr><th>Name</th><th>Value</th><th>Help</th></tr>",
         ]
         for key, val in self.get_current_config()[name].items():
+            htmlval = (
+                str(val).replace("/", "/<wbr>").replace("_", "_<wbr>")
+            )  # allow breaking at boundary
+
             # traits of the current component
             if key in traits:
                 thehelp = f"{traits[key].help} (default: {traits[key].default_value})"
                 lines.append(f"<tr><th>{key}</th>")
                 if val != traits[key].default_value:
-                    lines.append(f"<td><span style='color:blue'>{val}</span></td>")
+                    lines.append(
+                        f"<td><span style='color:blue'>{htmlval}</span></td>"
+                    )
                 else:
-                    lines.append(f"<td>{val}</td>")
-                lines.append(f'<td style="text-align:left"><i>{thehelp}</i></td></tr>')
+                    lines.append(f"<td>{htmlval}</td>")
+                lines.append(
+                    f"<td style='text-align: left;'><i>{thehelp}</i></td>"
+                )
+                lines.append("    </tr>")
         lines.append("</table>")
+        lines.append("</div>")
         return "\n".join(lines)
diff --git a/ctapipe/core/container.py b/ctapipe/core/container.py
index bcd07180fe1..cb1b536cab6 100644
--- a/ctapipe/core/container.py
+++ b/ctapipe/core/container.py
@@ -77,7 +77,9 @@ def __repr__(self):
if self.ndim is not None:
desc += f" as a {self.ndim}-D array"
if self.dtype is not None:
- desc += f" with type {self.dtype}"
+ desc += f" with dtype {self.dtype}"
+ if self.type is not None:
+ desc += f" with type {self.type}"
return desc
@@ -143,7 +145,7 @@ def validate(self, value):
class DeprecatedField(Field):
- """ used to mark which fields may be removed in next version """
+ """used to mark which fields may be removed in next version"""
def __init__(self, default, description="", unit=None, ucd=None, reason=""):
super().__init__(default=default, description=description, unit=unit, ucd=ucd)
diff --git a/ctapipe/core/tool.py b/ctapipe/core/tool.py
index 2d4118bfdb5..b7c23718a46 100644
--- a/ctapipe/core/tool.py
+++ b/ctapipe/core/tool.py
@@ -8,6 +8,8 @@
from abc import abstractmethod
from typing import Union
import yaml
+from docutils.core import publish_parts
+from inspect import cleandoc
try:
import tomli as toml
@@ -401,30 +403,47 @@ def _repr_html_(self):
"""nice HTML rep, with blue for non-default values"""
traits = self.traits()
name = self.__class__.__name__
+ docstring = (
+ publish_parts(
+ cleandoc(self.__class__.__doc__ or self.description), writer_name="html"
+ )["html_body"]
+ or "Undocumented"
+ )
         lines = [
             f"<b>{name}</b>",
-            f"<p> {self.__class__.__doc__ or self.description} </p>",
+            f"<p> {docstring} </p>",
             "<table>",
+            "    <colgroup>",
+            "        <col span='1' style='width: 20%;'>",
+            "        <col span='1' style='width: 20%;'>",
+            "        <col span='1' style='width: 60%;'>",
+            "    </colgroup>",
+            "    <tr><th>Name</th><th>Value</th><th>Help</th></tr>",
         ]
         for key, val in self.get_current_config()[name].items():
-            # after running setup, also the subcomponents are in the current config
-            # which are not in traits
-            if key not in traits:
-                continue
-
-            default = traits[key].default_value
-            thehelp = f"{traits[key].help} (default: {default})"
-            lines.append(f"<tr><th>{key}</th>")
-            if val != default:
-                lines.append(f"<td><span style='color:blue'>{val}</span></td>")
-            else:
-                lines.append(f"<td>{val}</td>")
-            lines.append(f'<td style="text-align:left"><i>{thehelp}</i></td></tr>')
-
+            htmlval = (
+                str(val).replace("/", "/<wbr>").replace("_", "_<wbr>")
+            )  # allow breaking at boundary
+
+            # traits of the current component
+            if key in traits:
+                thehelp = f"{traits[key].help} (default: {traits[key].default_value})"
+                lines.append(f"<tr><th>{key}</th>")
+                if val != traits[key].default_value:
+                    lines.append(
+                        f"<td><span style='color:blue'>{htmlval}</span></td>"
+                    )
+                else:
+                    lines.append(f"<td>{htmlval}</td>")
+                lines.append(
+                    f"<td style='text-align: left;'><i>{thehelp}</i></td>"
+                )
+                lines.append("    </tr>")
         lines.append("</table>")
-        lines.append("Components:")
+        lines.append("<p><b>Components:</b>")
         lines.append(", ".join([x.__name__ for x in self.classes]))
         lines.append("</p>")
+        lines.append("</div>")
         return "\n".join(lines)
diff --git a/ctapipe/image/__init__.py b/ctapipe/image/__init__.py
index 01fbde96eaa..5264cb0e0a3 100644
--- a/ctapipe/image/__init__.py
+++ b/ctapipe/image/__init__.py
@@ -45,7 +45,7 @@
TwoPassWindowSum,
extract_around_peak,
extract_sliding_window,
- neighbor_average_waveform,
+ neighbor_average_maximum,
subtract_baseline,
integration_correction,
)
@@ -110,7 +110,7 @@
"TwoPassWindowSum",
"extract_around_peak",
"extract_sliding_window",
- "neighbor_average_waveform",
+ "neighbor_average_maximum",
"subtract_baseline",
"integration_correction",
"DataVolumeReducer",
diff --git a/ctapipe/image/extractor.py b/ctapipe/image/extractor.py
index 34343bd36c3..a36c35b93da 100644
--- a/ctapipe/image/extractor.py
+++ b/ctapipe/image/extractor.py
@@ -14,7 +14,7 @@
"TwoPassWindowSum",
"extract_around_peak",
"extract_sliding_window",
- "neighbor_average_waveform",
+ "neighbor_average_maximum",
"subtract_baseline",
"integration_correction",
]
@@ -195,7 +195,7 @@ def extract_sliding_window(waveforms, width, sampling_rate_ghz, sum_, peak_time)
@njit(cache=True)
-def neighbor_average_waveform(waveforms, neighbors_indices, neighbors_indptr, lwt):
+def neighbor_average_maximum(waveforms, neighbors_indices, neighbors_indptr, lwt):
"""
Obtain the average waveform built from the neighbors of each pixel
@@ -228,19 +228,18 @@ def neighbor_average_waveform(waveforms, neighbors_indices, neighbors_indptr, lw
# initialize to waveforms weighted with lwt
# so the value of the pixel itself is already taken into account
- average = waveforms * lwt
+ peak_pos = np.empty(n_pixels, dtype=np.int64)
for pixel in prange(n_pixels):
+ average = waveforms[pixel] * lwt
neighbors = indices[indptr[pixel] : indptr[pixel + 1]]
- n = lwt
for neighbor in neighbors:
- average[pixel] += waveforms[neighbor]
- n += 1
+ average += waveforms[neighbor]
- average[pixel] /= n
+ peak_pos[pixel] = np.argmax(average)
- return average
+ return peak_pos
def subtract_baseline(waveforms, baseline_start, baseline_end):
@@ -751,13 +750,12 @@ def _calculate_correction(self, telid):
def __call__(self, waveforms, telid, selected_gain_channel):
neighbors = self.subarray.tel[telid].camera.geometry.neighbor_matrix_sparse
- average_wfs = neighbor_average_waveform(
+ peak_index = neighbor_average_maximum(
waveforms,
neighbors_indices=neighbors.indices,
neighbors_indptr=neighbors.indptr,
lwt=self.lwt.tel[telid],
)
- peak_index = average_wfs.argmax(axis=-1)
charge, peak_time = extract_around_peak(
waveforms,
peak_index,
diff --git a/ctapipe/image/tests/test_extractor.py b/ctapipe/image/tests/test_extractor.py
index 88d7a14d366..fb683ec9240 100644
--- a/ctapipe/image/tests/test_extractor.py
+++ b/ctapipe/image/tests/test_extractor.py
@@ -12,7 +12,7 @@
extract_around_peak,
extract_sliding_window,
integration_correction,
- neighbor_average_waveform,
+ neighbor_average_maximum,
subtract_baseline,
)
from ctapipe.image.toymodel import SkewedGaussian, WaveformModel, obtain_time_image
@@ -191,10 +191,10 @@ def test_extract_around_peak_charge_expected(toymodel):
assert_equal(charge, n_samples)
-def test_neighbor_average_waveform(toymodel):
+def test_neighbor_average_peakpos(toymodel):
waveforms, subarray, telid, _, _, _ = toymodel
neighbors = subarray.tel[telid].camera.geometry.neighbor_matrix_sparse
- average_wf = neighbor_average_waveform(
+ peak_pos = neighbor_average_maximum(
waveforms,
neighbors_indices=neighbors.indices,
neighbors_indptr=neighbors.indptr,
@@ -204,10 +204,11 @@ def test_neighbor_average_waveform(toymodel):
pixel = 0
_, nei_pixel = np.where(neighbors[pixel].A)
expected_average = waveforms[nei_pixel].sum(0) / len(nei_pixel)
- assert_allclose(average_wf[pixel], expected_average, rtol=1e-3)
+ expected_peak_pos = np.argmax(expected_average, axis=-1)
+ assert (peak_pos[pixel] == expected_peak_pos).all()
lwt = 4
- average_wf = neighbor_average_waveform(
+ peak_pos = neighbor_average_maximum(
waveforms,
neighbors_indices=neighbors.indices,
neighbors_indptr=neighbors.indptr,
@@ -218,7 +219,8 @@ def test_neighbor_average_waveform(toymodel):
_, nei_pixel = np.where(neighbors[pixel].A)
nei_pixel = np.concatenate([nei_pixel, [pixel] * lwt])
expected_average = waveforms[nei_pixel].sum(0) / len(nei_pixel)
- assert_allclose(average_wf[pixel], expected_average, rtol=1e-3)
+ expected_peak_pos = np.argmax(expected_average, axis=-1)
+ assert (peak_pos[pixel] == expected_peak_pos).all()
def test_extract_peak_time_within_range():
@@ -532,7 +534,7 @@ def test_global_peak_window_sum_with_pixel_fraction(subarray):
tel_id = 1
camera = subarray.tel[tel_id].camera
- sample_rate = camera.readout.sampling_rate.to_value(u.ns ** -1)
+ sample_rate = camera.readout.sampling_rate.to_value(u.ns**-1)
n_pixels = camera.geometry.n_pixels
selected_gain_channel = np.zeros(n_pixels, dtype=np.uint8)
diff --git a/ctapipe/instrument/subarray.py b/ctapipe/instrument/subarray.py
index 557040b42b0..cc7c5b6d4a1 100644
--- a/ctapipe/instrument/subarray.py
+++ b/ctapipe/instrument/subarray.py
@@ -1,26 +1,24 @@
"""
Description of Arrays or Subarrays of telescopes
"""
-from typing import Dict, List, Union
-from contextlib import ExitStack
import warnings
+from contextlib import ExitStack
+from copy import copy
+from itertools import groupby
+from typing import Dict, List, Union
import numpy as np
+import tables
from astropy import units as u
from astropy.coordinates import SkyCoord
from astropy.table import QTable, Table
from astropy.utils import lazyproperty
-import tables
-from copy import copy
-from itertools import groupby
-import ctapipe
-
-from ..coordinates import GroundFrame, CameraFrame
-from .telescope import TelescopeDescription
-from .camera import CameraDescription, CameraReadout, CameraGeometry
+from .. import __version__ as CTAPIPE_VERSION
+from ..coordinates import CameraFrame, GroundFrame
+from .camera import CameraDescription, CameraGeometry, CameraReadout
from .optics import OpticsDescription
-
+from .telescope import TelescopeDescription
__all__ = ["SubarrayDescription"]
@@ -90,7 +88,7 @@ def __repr__(self):
@property
def tel(self):
- """ for backward compatibility"""
+ """for backward compatibility"""
return self.tels
@property
@@ -132,7 +130,7 @@ def info(self, printer=print):
@lazyproperty
def tel_coords(self):
- """ returns telescope positions as astropy.coordinates.SkyCoord"""
+ """returns telescope positions as astropy.coordinates.SkyCoord"""
pos_x = np.array([p[0].to("m").value for p in self.positions.values()]) * u.m
pos_y = np.array([p[1].to("m").value for p in self.positions.values()]) * u.m
@@ -142,7 +140,7 @@ def tel_coords(self):
@lazyproperty
def tel_ids(self):
- """ telescope IDs as an array"""
+ """telescope IDs as an array"""
return np.array(list(self.tel.keys()))
@lazyproperty
@@ -238,7 +236,7 @@ def to_table(self, kind="subarray"):
meta = {
"ORIGIN": "ctapipe.instrument.SubarrayDescription",
"SUBARRAY": self.name,
- "SOFT_VER": ctapipe.__version__,
+ "SOFT_VER": CTAPIPE_VERSION,
"TAB_TYPE": kind,
}
@@ -277,8 +275,8 @@ def to_table(self, kind="subarray"):
unique_types = self.telescope_types
mirror_area = u.Quantity(
- [t.optics.mirror_area.to_value(u.m ** 2) for t in unique_types],
- u.m ** 2,
+ [t.optics.mirror_area.to_value(u.m**2) for t in unique_types],
+ u.m**2,
)
focal_length = u.Quantity(
[t.optics.equivalent_focal_length.to_value(u.m) for t in unique_types],
@@ -332,47 +330,29 @@ def peek(self):
"""
Draw a quick matplotlib plot of the array
"""
+ from ctapipe.coordinates.ground_frames import EastingNorthingFrame
+ from ctapipe.visualization import ArrayDisplay
from matplotlib import pyplot as plt
- from astropy.visualization import quantity_support
-
- types = set(self.tels.values())
- tab = self.to_table()
plt.figure(figsize=(8, 8))
-
- with quantity_support():
- for tel_type in types:
- tels = tab[tab["tel_description"] == str(tel_type)]["tel_id"]
- sub = self.select_subarray(tels, name=tel_type)
- tel_coords = sub.tel_coords
- radius = np.array(
- [
- np.sqrt(tel.optics.mirror_area / np.pi).value
- for tel in sub.tels.values()
- ]
- )
-
- plt.scatter(
- tel_coords.x, tel_coords.y, s=radius * 8, alpha=0.5, label=tel_type
- )
-
- plt.legend(loc="best")
- plt.title(self.name)
- plt.tight_layout()
+ ad = ArrayDisplay(subarray=self, frame=EastingNorthingFrame(), tel_scale=0.75)
+ ad.add_labels()
+ plt.title(self.name)
+ plt.tight_layout()
@lazyproperty
def telescope_types(self) -> List[TelescopeDescription]:
- """ list of telescope types in the array"""
+ """list of telescope types in the array"""
return list({tel for tel in self.tel.values()})
@lazyproperty
def camera_types(self) -> List[CameraDescription]:
- """ list of camera types in the array """
+ """list of camera types in the array"""
return list({tel.camera for tel in self.tel.values()})
@lazyproperty
def optics_types(self) -> List[OpticsDescription]:
- """ list of optics types in the array """
+ """list of optics types in the array"""
return list({tel.optics for tel in self.tel.values()})
def get_tel_ids_for_type(self, tel_type):
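
The rewritten `peek` above reduces to the pattern below, which can be reused directly for custom array plots (sketch only; `subarray` stands for any existing `SubarrayDescription` instance):

```python
from matplotlib import pyplot as plt

from ctapipe.coordinates import EastingNorthingFrame
from ctapipe.visualization import ArrayDisplay

# same calls as the new peek() implementation; subarray is assumed to exist
plt.figure(figsize=(8, 8))
display = ArrayDisplay(subarray=subarray, frame=EastingNorthingFrame(), tel_scale=0.75)
display.add_labels()
plt.title(subarray.name)
plt.tight_layout()
```

Delegating to `ArrayDisplay` removes the hand-rolled scatter-plot loop and keeps `peek` consistent with the interactive display tools.
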
diff --git a/ctapipe/io/eventseeker.py b/ctapipe/io/eventseeker.py
index 3c58643db8a..bb77b069c12 100644
--- a/ctapipe/io/eventseeker.py
+++ b/ctapipe/io/eventseeker.py
@@ -28,7 +28,7 @@ class EventSeeker(Component):
To obtain a particular event in a simtel file:
>>> from ctapipe.io import SimTelEventSource
- >>> event_source = SimTelEventSource(input_url="dataset://gamma_test_large.simtel.gz")
+ >>> event_source = SimTelEventSource(input_url="dataset://gamma_test_large.simtel.gz", focal_length_choice="nominal")
>>> seeker = EventSeeker(event_source=event_source)
>>> event = seeker.get_event_index(2)
>>> print(event.count)
@@ -37,7 +37,7 @@ class EventSeeker(Component):
To obtain a particular event in a simtel file from its event_id:
>>> from ctapipe.io import SimTelEventSource
- >>> event_source = SimTelEventSource(input_url="dataset://gamma_test_large.simtel.gz", back_seekable=True)
+ >>> event_source = SimTelEventSource(input_url="dataset://gamma_test_large.simtel.gz", back_seekable=True, focal_length_choice="nominal")
>>> seeker = EventSeeker(event_source=event_source)
>>> event = seeker.get_event_id(31007)
>>> print(event.count)
diff --git a/ctapipe/io/eventsource.py b/ctapipe/io/eventsource.py
index 4b46f33c4b5..61a9c50c058 100644
--- a/ctapipe/io/eventsource.py
+++ b/ctapipe/io/eventsource.py
@@ -36,7 +36,7 @@ class EventSource(Component):
appropriate subclass if a compatible source is found for the given
``input_url``.
- >>> EventSource(input_url="dataset://gamma_test_large.simtel.gz")
+ >>> EventSource(input_url="dataset://gamma_prod5.simtel.zst")
An ``EventSource`` can also be created through the configuration system,
@@ -45,7 +45,7 @@ class EventSource(Component):
>>> self.source = EventSource(parent=self) # doctest: +SKIP
To loop through the events in a file:
- >>> source = EventSource(input_url="dataset://gamma_test_large.simtel.gz", max_events=2)
+ >>> source = EventSource(input_url="dataset://gamma_prod5.simtel.zst", max_events=2)
>>> for event in source:
... print(event.count)
0
@@ -58,7 +58,7 @@ class EventSource(Component):
It is encouraged to use ``EventSource`` in a context manager to ensure
the correct cleanups are performed when you are finished with the source:
- >>> with EventSource(input_url="dataset://gamma_test_large.simtel.gz", max_events=2) as source:
+ >>> with EventSource(input_url="dataset://gamma_prod5.simtel.zst", max_events=2) as source:
... for event in source:
... print(event.count)
0
diff --git a/ctapipe/io/hdf5eventsource.py b/ctapipe/io/hdf5eventsource.py
index 3634c246827..48f534185d7 100644
--- a/ctapipe/io/hdf5eventsource.py
+++ b/ctapipe/io/hdf5eventsource.py
@@ -25,6 +25,8 @@
TimingParametersContainer,
TriggerContainer,
ImageParametersContainer,
+ TelEventIndexContainer,
+ TelescopeTriggerContainer,
R1CameraContainer,
)
from .eventsource import EventSource
@@ -126,14 +128,12 @@ def __init__(self, input_url=None, config=None, parent=None, **kwargs):
super().__init__(input_url=input_url, config=config, parent=parent, **kwargs)
self.file_ = tables.open_file(self.input_url)
- self._full_subarray_info = SubarrayDescription.from_hdf(self.input_url)
+ self._full_subarray = SubarrayDescription.from_hdf(self.input_url)
if self.allowed_tels:
- self._subarray_info = self._full_subarray_info.select_subarray(
- self.allowed_tels
- )
+ self._subarray = self._full_subarray.select_subarray(self.allowed_tels)
else:
- self._subarray_info = self._full_subarray_info
+ self._subarray = self._full_subarray
self._simulation_configs = self._parse_simulation_configs()
self.datamodel_version = self.file_.root._v_attrs[
"CTA PRODUCT DATA MODEL VERSION"
@@ -196,7 +196,7 @@ def has_simulated_dl1(self):
@property
def subarray(self):
- return self._subarray_info
+ return self._subarray
@lazyproperty
def datalevels(self):
@@ -243,7 +243,7 @@ class ObsIdContainer(Container):
if "simulation" in self.file_.root.configuration:
reader = HDF5TableReader(self.file_).read(
"/configuration/simulation/run",
- containers=[SimulationConfigContainer(), ObsIdContainer()],
+ containers=(SimulationConfigContainer, ObsIdContainer),
)
for (config, index) in reader:
simulation_configs[index.obs_id] = config
@@ -266,7 +266,7 @@ def _generate_events(self):
if DataLevel.R1 in self.datalevels:
waveform_readers = {
tel.name: self.reader.read(
- f"/r1/event/telescope/{tel.name}", R1CameraContainer()
+ f"/r1/event/telescope/{tel.name}", R1CameraContainer
)
for tel in self.file_.root.r1.event.telescope
}
@@ -275,7 +275,7 @@ def _generate_events(self):
image_readers = {
tel.name: self.reader.read(
f"/dl1/event/telescope/images/{tel.name}",
- DL1CameraContainer(),
+ DL1CameraContainer,
ignore_columns={"parameters"},
)
for tel in self.file_.root.dl1.event.telescope.images
@@ -289,27 +289,35 @@ def _generate_events(self):
}
if DataLevel.DL1_PARAMETERS in self.datalevels:
+ # FIXME: check units or config, not version. We have a switch.
+ if self.datamodel_version >= "v2.1.0":
+ hillas_cls = HillasParametersContainer
+ timing_cls = TimingParametersContainer
+ else:
+ hillas_cls = CameraHillasParametersContainer
+ timing_cls = CameraTimingParametersContainer
+
param_readers = {
tel.name: self.reader.read(
f"/dl1/event/telescope/parameters/{tel.name}",
- containers=[
- (
- HillasParametersContainer()
- if (self.datamodel_version >= "v2.1.0")
- else CameraHillasParametersContainer(prefix="hillas")
- ),
- (
- TimingParametersContainer()
- if (self.datamodel_version >= "v2.1.0")
- else CameraTimingParametersContainer(prefix="timing")
- ),
- LeakageContainer(),
- ConcentrationContainer(),
- MorphologyContainer(),
- IntensityStatisticsContainer(),
- PeakTimeStatisticsContainer(),
+ containers=(
+ hillas_cls,
+ timing_cls,
+ LeakageContainer,
+ ConcentrationContainer,
+ MorphologyContainer,
+ IntensityStatisticsContainer,
+ PeakTimeStatisticsContainer,
+ ),
+ prefixes=[
+ "hillas",
+ "timing",
+ "leakage",
+ "concentration",
+ "morphology",
+ "intensity",
+ "peak_time",
],
- prefixes=True,
)
for tel in self.file_.root.dl1.event.telescope.parameters
}
@@ -318,15 +326,11 @@ def _generate_events(self):
tel.name: self.reader.read(
f"/simulation/event/telescope/parameters/{tel.name}",
containers=[
- (
- HillasParametersContainer()
- if (self.datamodel_version >= "v2.1.0")
- else CameraHillasParametersContainer(prefix="hillas")
- ),
- LeakageContainer(),
- ConcentrationContainer(),
- MorphologyContainer(),
- IntensityStatisticsContainer(),
+ hillas_cls,
+ LeakageContainer,
+ ConcentrationContainer,
+ MorphologyContainer,
+ IntensityStatisticsContainer,
],
prefixes=True,
)
@@ -337,7 +341,7 @@ def _generate_events(self):
# simulated shower wide information
mc_shower_reader = HDF5TableReader(self.file_).read(
"/simulation/event/subarray/shower",
- SimulatedShowerContainer(),
+ SimulatedShowerContainer,
prefixes="true",
)
data.simulation = SimulatedEventContainer()
@@ -345,9 +349,14 @@ def _generate_events(self):
# Setup iterators for the array events
events = HDF5TableReader(self.file_).read(
"/dl1/event/subarray/trigger",
- [TriggerContainer(), EventIndexContainer()],
+ [TriggerContainer, EventIndexContainer],
ignore_columns={"tel"},
)
+ telescope_trigger_reader = HDF5TableReader(self.file_).read(
+ "/dl1/event/telescope/trigger",
+ [TelEventIndexContainer, TelescopeTriggerContainer],
+ ignore_columns={"trigger_pixels"},
+ )
array_pointing_finder = IndexFinder(
self.file_.root.dl1.monitoring.subarray.pointing.col("time")
@@ -358,7 +367,8 @@ def _generate_events(self):
for tel in self.file_.root.dl1.monitoring.telescope.pointing
}
- for counter, (trigger, index) in enumerate(events):
+ counter = 0
+ for trigger, index in events:
data.dl1.tel.clear()
if self.is_simulation:
data.simulation.tel.clear()
@@ -368,29 +378,29 @@ def _generate_events(self):
data.count = counter
data.trigger = trigger
data.index = index
- data.trigger.tels_with_trigger = (
- self._full_subarray_info.tel_mask_to_tel_ids(
- data.trigger.tels_with_trigger
- )
+ data.trigger.tels_with_trigger = self._full_subarray.tel_mask_to_tel_ids(
+ data.trigger.tels_with_trigger
)
+ full_tels_with_trigger = data.trigger.tels_with_trigger.copy()
if self.allowed_tels:
data.trigger.tels_with_trigger = np.intersect1d(
data.trigger.tels_with_trigger, np.array(list(self.allowed_tels))
)
- # Maybe there is a simpler way to do this
- # Beware: tels_with_trigger contains all triggered telescopes whereas
- # the telescope trigger table contains only the subset of
- # allowed_tels given during the creation of the dl1 file
- for i in self.file_.root.dl1.event.telescope.trigger.where(
- f"(obs_id=={data.index.obs_id}) & (event_id=={data.index.event_id})"
- ):
- if self.allowed_tels and i["tel_id"] not in self.allowed_tels:
+ # the telescope trigger table contains triggers for all telescopes
+ # that participated in the event, so we need to read a row for each
+ # of them, ignoring the ones not in allowed_tels after reading
+ for tel_id in full_tels_with_trigger:
+ tel_index, tel_trigger = next(telescope_trigger_reader)
+
+ if self.allowed_tels and tel_id not in self.allowed_tels:
continue
- if self.datamodel_version == "v1.0.0":
- data.trigger.tel[i["tel_id"]].time = i["telescopetrigger_time"]
- else:
- data.trigger.tel[i["tel_id"]].time = i["time"]
+
+ data.trigger.tel[tel_index.tel_id] = tel_trigger
+
+ # this needs to stay *after* reading the telescope trigger table
+ if len(data.trigger.tels_with_trigger) == 0:
+ continue
self._fill_array_pointing(data, array_pointing_finder)
self._fill_telescope_pointing(data, tel_pointing_finder)
@@ -472,6 +482,7 @@ def _generate_events(self):
)
yield data
+ counter += 1
def _fill_array_pointing(self, data, array_pointing_finder):
"""
@@ -510,7 +521,7 @@ def _fill_telescope_pointing(self, data, tel_pointing_finder):
f"tel_{tel:03d}"
]
closest_time_index = tel_pointing_finder[f"tel_{tel:03d}"].closest(
- data.trigger.tel[tel].time
+ data.trigger.tel[tel].time.mjd
)
pointing_telescope = tel_pointing_table
data.pointing.tel[tel].azimuth = u.Quantity(
diff --git a/ctapipe/io/hdf5tableio.py b/ctapipe/io/hdf5tableio.py
index 1d776d2c7e6..8af447144da 100644
--- a/ctapipe/io/hdf5tableio.py
+++ b/ctapipe/io/hdf5tableio.py
@@ -35,8 +35,8 @@
"uint16": tables.UInt16Col,
"uint32": tables.UInt32Col,
"uint64": tables.UInt64Col,
- "bool": tables.BoolCol, # python bool
- "bool_": tables.BoolCol, # np.bool_
+ "bool": tables.BoolCol, # python bool
+ "bool_": tables.BoolCol, # np.bool_
}
@@ -267,8 +267,8 @@ class Schema(tables.IsDescription):
return meta
def _setup_new_table(self, table_name, containers):
- """ set up the table. This is called the first time `write()`
- is called on a new table """
+ """set up the table. This is called the first time `write()`
+ is called on a new table"""
self.log.debug("Initializing table '%s' in group '%s'", table_name, self._group)
meta = self._create_hdf5_table_schema(table_name, containers)
@@ -391,6 +391,7 @@ def __init__(self, filename, **kwargs):
self._tables = {}
self._cols_to_read = {}
self._missing_cols = {}
+ self._meta = {}
kwargs.update(mode="r")
if isinstance(filename, str) or isinstance(filename, PurePath):
@@ -463,7 +464,7 @@ def _map_transforms_from_table_header(self, table_name):
def _map_table_to_containers(
self, table_name, containers, prefixes, ignore_columns
):
- """ identifies which columns in the table to read into the containers,
+ """identifies which columns in the table to read into the containers,
by comparing their names including an optional prefix."""
tab = self._tables[table_name]
self._cols_to_read[table_name] = []
@@ -499,13 +500,14 @@ def _map_table_to_containers(
self._missing_cols[table_name][-1].append(colname)
self.log.warning(
f"Table {table_name} is missing column {colname_with_prefix} "
- f"that is in container {container.__class__.__name__}. "
+ f"that is in container {container}. "
"It will be skipped."
)
- # copy all user-defined attributes back to Container.meta
+ # store the meta
+ self._meta[table_name] = {}
for key in tab.attrs._f_list():
- container.meta[key] = tab.attrs[key]
+ self._meta[table_name][key] = tab.attrs[key]
# check if the table has additional columns not present in any container
for colname in tab.colnames:
@@ -525,8 +527,8 @@ def read(self, table_name, containers, prefixes=False, ignore_columns=None):
----------
table_name: str
name of table to read from
- container : ctapipe.core.Container
- Container instance to fill
+ containers : Iterable[ctapipe.core.Container]
+ Container classes to fill
prefix: bool, str or list
Prefix that was added while writing the file.
If True, the container prefix is taken into consideration, when
@@ -540,17 +542,28 @@ def read(self, table_name, containers, prefixes=False, ignore_columns=None):
ignore_columns = set(ignore_columns) if ignore_columns is not None else set()
return_iterable = True
+
if isinstance(containers, Container):
+ raise TypeError("Expected container *classes*, not *instances*")
+
+ # check for a single container
+ if isinstance(containers, type):
containers = (containers,)
return_iterable = False
+ for container in containers:
+ if isinstance(container, Container):
+ raise TypeError("Expected container *classes*, not *instances*")
+
if prefixes is False:
prefixes = ["" for _ in containers]
elif prefixes is True:
- prefixes = [container.prefix for container in containers]
+ prefixes = [container.container_prefix for container in containers]
elif isinstance(prefixes, str):
prefixes = [prefixes for _ in containers]
- assert len(prefixes) == len(containers)
+
+ if len(prefixes) != len(containers):
+ raise ValueError("Length of provided prefixes does not match containers")
if table_name not in self._tables:
tab = self._setup_table(table_name, containers, prefixes, ignore_columns)
@@ -564,8 +577,10 @@ def read(self, table_name, containers, prefixes=False, ignore_columns=None):
# __getitem__ just gives plain numpy data
row = tab[row_index]
- for container, prefix, missing_cols in zip(containers, prefixes, missing):
- for fieldname in container.keys():
+ ret = []
+ for cls, prefix, missing_cols in zip(containers, prefixes, missing):
+ kwargs = {}
+ for fieldname in cls.fields.keys():
if prefix:
colname = f"{prefix}_{fieldname}"
@@ -575,15 +590,19 @@ def read(self, table_name, containers, prefixes=False, ignore_columns=None):
if colname not in self._cols_to_read[table_name]:
continue
- container[fieldname] = self._apply_col_transform(
+ kwargs[fieldname] = self._apply_col_transform(
table_name, colname, row[colname]
)
# set missing fields to None
for fieldname in missing_cols:
- container[fieldname] = None
+ kwargs[fieldname] = None
+
+ container = cls(**kwargs)
+ container.meta = self._meta[table_name]
+ ret.append(container)
if return_iterable:
- yield containers
+ yield ret
else:
- yield containers[0]
+ yield ret[0]
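
With these changes `HDF5TableReader.read` expects container *classes* (plus an explicit prefix list when the stored prefixes differ from the defaults) and yields freshly constructed instances for every row. A sketch of the new call pattern, mirroring the updated tests below (file name and table path are placeholders):

```python
from ctapipe.containers import HillasParametersContainer, LeakageContainer
from ctapipe.io import HDF5TableReader

with HDF5TableReader("dl1_parameters.h5") as reader:
    rows = reader.read(
        "/dl1/params",
        (HillasParametersContainer, LeakageContainer),
        prefixes=["hillas", "leakage"],
    )
    # every next() builds new container instances, so rows are never reused
    hillas, leakage = next(rows)
```
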
diff --git a/ctapipe/io/simteleventsource.py b/ctapipe/io/simteleventsource.py
index 380035a602f..b39f0d8c818 100644
--- a/ctapipe/io/simteleventsource.py
+++ b/ctapipe/io/simteleventsource.py
@@ -182,7 +182,7 @@ class SimTelEventSource(EventSource):
focal_length_choice = CaselessStrEnum(
["nominal", "effective"],
- default_value="nominal",
+ default_value="effective",
help=(
"if both nominal and effective focal lengths are available in the "
"SimTelArray file, which one to use when constructing the "
@@ -308,26 +308,31 @@ def prepare_subarray_info(self, telescope_descriptions, header):
pixel_settings = telescope_description["pixel_settings"]
n_pixels = cam_settings["n_pixels"]
- focal_length = u.Quantity(cam_settings["focal_length"], u.m)
mirror_area = u.Quantity(cam_settings["mirror_area"], u.m**2)
- if self.focal_length_choice == "effective":
- try:
- focal_length = u.Quantity(
- cam_settings["effective_focal_length"], u.m
- )
- except KeyError as err:
- raise RuntimeError(
- f"the SimTelEventSource option 'focal_length_choice' was set to "
- f"{self.focal_length_choice}, but the effective focal length "
- f"was not present in the file. ({err})"
- )
+ nominal_focal_length = u.Quantity(cam_settings["focal_length"], u.m)
+ effective_focal_length = u.Quantity(
+ cam_settings.get("effective_focal_length", np.nan), u.m
+ )
try:
- telescope = guess_telescope(n_pixels, focal_length)
+ telescope = guess_telescope(n_pixels, nominal_focal_length)
except ValueError:
telescope = unknown_telescope(mirror_area, n_pixels)
+ if self.focal_length_choice == "effective":
+ if np.isnan(effective_focal_length):
+ raise RuntimeError(
+ f"`SimTelEventSource.focal_length_choice` was set to"
+ f" {self.focal_length_choice!r}, but the effective focal length"
+ f" was not present in the file. "
+ " Use nominal focal length or adapt your simulation configuration"
+ " to include the effective focal length"
+ )
+ focal_length = effective_focal_length
+ else:
+ focal_length = nominal_focal_length
+
optics = OpticsDescription(
name=telescope.name,
num_mirrors=telescope.n_mirrors,
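
Because the default is now "effective", files that only store the nominal focal length (such as the prod3 test files used throughout this patch) must request it explicitly, which is what the test updates above and below do. A sketch:

```python
from ctapipe.io import SimTelEventSource
from ctapipe.utils import get_dataset_path

# prod3 simulations carry no effective focal length, so ask for the nominal one
path = get_dataset_path("gamma_test_large.simtel.gz")
with SimTelEventSource(input_url=path, focal_length_choice="nominal") as source:
    for event in source:
        print(event.index.event_id)
```
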
diff --git a/ctapipe/io/tableloader.py b/ctapipe/io/tableloader.py
index 678900c6a39..4fd5c456de4 100644
--- a/ctapipe/io/tableloader.py
+++ b/ctapipe/io/tableloader.py
@@ -79,13 +79,13 @@ class TableLoader(Component):
The following `TableLoader` methods load data from all relevant tables,
depending on the options, and joins them into single tables:
+
* `TableLoader.read_subarray_events`
* `TableLoader.read_telescope_events`
-
- `TableLoader.read_telescope_events_by_type` retuns a dict with a table per
- telescope type, which is needed for e.g. DL1 image data that might have
- different shapes for each of the telescope types as tables do not support
- variable length columns.
+    * `TableLoader.read_telescope_events_by_type` returns a dict with a table per
+ telescope type, which is needed for e.g. DL1 image data that might have
+ different shapes for each of the telescope types as tables do not support
+ variable length columns.
"""
input_url = traits.Path(directory_ok=False, exists=True).tag(config=True)
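
A sketch of the three entry points named in the docstring above (the file name is a placeholder and loader options other than `input_url` are omitted):

```python
from ctapipe.io import TableLoader

loader = TableLoader(input_url="gamma.dl1.h5")

subarray_events = loader.read_subarray_events()           # one row per array event
telescope_events = loader.read_telescope_events()         # one row per telescope event
tables_per_type = loader.read_telescope_events_by_type()  # dict: telescope type -> table
```
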
diff --git a/ctapipe/io/tests/conftest.py b/ctapipe/io/tests/conftest.py
index a3f327696c0..a29cdd20a89 100644
--- a/ctapipe/io/tests/conftest.py
+++ b/ctapipe/io/tests/conftest.py
@@ -1,6 +1,5 @@
import pytest
from ctapipe.io import EventSource, DataWriter
-from ctapipe.utils import get_dataset_path
@pytest.fixture(scope="session")
@@ -9,11 +8,10 @@ def r1_path(tmp_path_factory):
@pytest.fixture(scope="session")
-def r1_hdf5_file(r1_path):
+def r1_hdf5_file(prod5_proton_simtel_path, r1_path):
source = EventSource(
- get_dataset_path("gamma_LaPalma_baseline_20Zd_180Az_prod3b_test.simtel.gz"),
+ prod5_proton_simtel_path,
max_events=5,
- allowed_tels=[1, 2, 3, 4],
)
path = r1_path / "test_r1.h5"
diff --git a/ctapipe/io/tests/test_datawriter.py b/ctapipe/io/tests/test_datawriter.py
index 85e5c0c27ea..6a0b8a0aa8f 100644
--- a/ctapipe/io/tests/test_datawriter.py
+++ b/ctapipe/io/tests/test_datawriter.py
@@ -51,6 +51,7 @@ def test_write(tmpdir: Path):
get_dataset_path("gamma_LaPalma_baseline_20Zd_180Az_prod3b_test.simtel.gz"),
max_events=20,
allowed_tels=[1, 2, 3, 4],
+        focal_length_choice="nominal",
)
calibrate = CameraCalibrator(subarray=source.subarray)
@@ -130,6 +131,7 @@ def test_roundtrip(tmpdir: Path):
get_dataset_path("gamma_LaPalma_baseline_20Zd_180Az_prod3b_test.simtel.gz"),
max_events=20,
allowed_tels=[1, 2, 3, 4],
+        focal_length_choice="nominal",
)
calibrate = CameraCalibrator(subarray=source.subarray)
@@ -201,7 +203,7 @@ def test_dl1writer_no_events(tmpdir: Path):
output_path = Path(tmpdir / "no_events.dl1.h5")
dataset = "lst_prod3_calibration_and_mcphotons.simtel.zst"
- with EventSource(get_dataset_path(dataset)) as source:
+    with EventSource(get_dataset_path(dataset), focal_length_choice="nominal") as source:
# exhaust source
for _ in source:
pass
@@ -231,7 +233,7 @@ def test_dl1writer_no_events(tmpdir: Path):
def test_metadata(tmpdir: Path):
output_path = Path(tmpdir / "metadata.dl1.h5")
- dataset = "lst_prod3_calibration_and_mcphotons.simtel.zst"
+ dataset = "gamma_20deg_0deg_run2___cta-prod5-paranal_desert-2147m-Paranal-dark_cone10-100evts.simtel.zst"
config = Config(
{
diff --git a/ctapipe/io/tests/test_event_source.py b/ctapipe/io/tests/test_event_source.py
index a46f85061cf..9a839ff89dc 100644
--- a/ctapipe/io/tests/test_event_source.py
+++ b/ctapipe/io/tests/test_event_source.py
@@ -6,6 +6,8 @@
from ctapipe.core import Component
+prod5_path = "gamma_20deg_0deg_run2___cta-prod5-paranal_desert-2147m-Paranal-dark_cone10-100evts.simtel.zst"
+
def test_construct():
# at least one of input_url / parent / config is required
@@ -43,20 +45,20 @@ def datalevels(self):
def test_can_be_implemented():
- dataset = get_dataset_path("gamma_test_large.simtel.gz")
+ dataset = get_dataset_path(prod5_path)
test_reader = DummyReader(input_url=dataset)
assert test_reader is not None
def test_is_iterable():
- dataset = get_dataset_path("gamma_test_large.simtel.gz")
+ dataset = get_dataset_path(prod5_path)
test_reader = DummyReader(input_url=dataset)
for _ in test_reader:
pass
def test_function():
- dataset = get_dataset_path("gamma_test_large.simtel.gz")
+ dataset = get_dataset_path(prod5_path)
reader = EventSource(input_url=dataset)
assert isinstance(reader, SimTelEventSource)
assert reader.input_url == dataset
@@ -75,7 +77,7 @@ def test_function_nonexistant_file():
def test_from_config():
- dataset = get_dataset_path("gamma_test_large.simtel.gz")
+ dataset = get_dataset_path(prod5_path)
config = Config({"EventSource": {"input_url": dataset}})
reader = EventSource(config=config)
assert isinstance(reader, SimTelEventSource)
@@ -83,7 +85,7 @@ def test_from_config():
def test_from_config_parent():
- dataset = get_dataset_path("gamma_test_large.simtel.gz")
+ dataset = get_dataset_path(prod5_path)
class Parent(Component):
def __init__(self, config=None, parent=None):
@@ -109,7 +111,7 @@ def __init__(self, config=None, parent=None):
def test_from_config_default():
old_default = EventSource.input_url.default_value
- dataset = get_dataset_path("gamma_test_large.simtel.gz")
+ dataset = get_dataset_path(prod5_path)
EventSource.input_url.default_value = dataset
config = Config()
reader = EventSource(config=config, parent=None)
@@ -119,7 +121,7 @@ def test_from_config_default():
def test_from_config_invalid_type():
- dataset = get_dataset_path("gamma_test_large.simtel.gz")
+ dataset = get_dataset_path(prod5_path)
EventSource.input_url.default_value = dataset
config = Config({"EventSource": {"input_url": 124}})
with pytest.raises(TraitError):
@@ -127,10 +129,10 @@ def test_from_config_invalid_type():
def test_event_source_input_url_config_override():
- dataset1 = get_dataset_path("gamma_test_large.simtel.gz")
- dataset2 = get_dataset_path(
+ dataset1 = get_dataset_path(
"gamma_LaPalma_baseline_20Zd_180Az_prod3b_test.simtel.gz"
)
+ dataset2 = get_dataset_path(prod5_path)
config = Config({"EventSource": {"input_url": dataset1}})
reader = EventSource(input_url=dataset2, config=config)
@@ -141,13 +143,13 @@ def test_event_source_input_url_config_override():
def test_max_events():
max_events = 10
- dataset = get_dataset_path("gamma_test_large.simtel.gz")
+ dataset = get_dataset_path(prod5_path)
reader = EventSource(input_url=dataset, max_events=max_events)
assert reader.max_events == max_events
def test_max_events_from_config():
- dataset = get_dataset_path("gamma_test_large.simtel.gz")
+ dataset = get_dataset_path(prod5_path)
max_events = 10
config = Config({"EventSource": {"input_url": dataset, "max_events": max_events}})
reader = EventSource(config=config)
@@ -155,15 +157,15 @@ def test_max_events_from_config():
def test_allowed_tels():
- dataset = get_dataset_path("gamma_test_large.simtel.gz")
+ dataset = get_dataset_path(prod5_path)
reader = EventSource(input_url=dataset)
assert reader.allowed_tels is None
reader = EventSource(input_url=dataset, allowed_tels={1, 3})
- assert len(reader.allowed_tels) == 2
+ assert reader.allowed_tels == {1, 3}
def test_allowed_tels_from_config():
- dataset = get_dataset_path("gamma_test_large.simtel.gz")
+ dataset = get_dataset_path(prod5_path)
config = Config({"EventSource": {"input_url": dataset, "allowed_tels": {1, 3}}})
reader = EventSource(config=config, parent=None)
- assert len(reader.allowed_tels) == 2
+ assert reader.allowed_tels == {1, 3}
diff --git a/ctapipe/io/tests/test_eventseeker.py b/ctapipe/io/tests/test_eventseeker.py
index fee3119c380..58ec2547afa 100644
--- a/ctapipe/io/tests/test_eventseeker.py
+++ b/ctapipe/io/tests/test_eventseeker.py
@@ -8,7 +8,11 @@
def test_eventseeker():
- with SimTelEventSource(input_url=dataset, back_seekable=True) as reader:
+ with SimTelEventSource(
+ input_url=dataset,
+ back_seekable=True,
+        focal_length_choice="nominal",
+ ) as reader:
seeker = EventSeeker(event_source=reader)
@@ -33,7 +37,8 @@ def test_eventseeker():
seeker.get_event_index(dict())
with SimTelEventSource(
- input_url=dataset, max_events=5, back_seekable=True
+ input_url=dataset, max_events=5, back_seekable=True,
+        focal_length_choice="nominal",
) as reader:
seeker = EventSeeker(event_source=reader)
with pytest.raises(IndexError):
@@ -42,7 +47,10 @@ def test_eventseeker():
def test_eventseeker_edit():
- with SimTelEventSource(input_url=dataset, back_seekable=True) as reader:
+ with SimTelEventSource(
+ input_url=dataset, back_seekable=True,
+        focal_length_choice="nominal",
+ ) as reader:
seeker = EventSeeker(event_source=reader)
event = seeker.get_event_index(1)
assert event.count == 1
@@ -54,7 +62,10 @@ def test_eventseeker_edit():
def test_eventseeker_simtel():
# Ensure the EventSeeker can forward seek even if back-seeking is not possible
- with SimTelEventSource(input_url=dataset, back_seekable=False) as reader:
+ with SimTelEventSource(
+ input_url=dataset, back_seekable=False,
+        focal_length_choice="nominal",
+ ) as reader:
seeker = EventSeeker(event_source=reader)
event = seeker.get_event_index(1)
assert event.count == 1
diff --git a/ctapipe/io/tests/test_hdf5.py b/ctapipe/io/tests/test_hdf5.py
index f63de8efa42..b21c5aa17c4 100644
--- a/ctapipe/io/tests/test_hdf5.py
+++ b/ctapipe/io/tests/test_hdf5.py
@@ -67,6 +67,13 @@ def test_append_container(tmp_path):
assert np.all(table["event_id"] == np.tile(np.arange(10), 2))
+def test_reader_container_reuse(test_h5_file):
+ """Test the reader does not reuse the same container instance"""
+ with HDF5TableReader(test_h5_file) as reader:
+ iterator = reader.read("/R0/sim_shower", SimulatedShowerContainer)
+ assert next(iterator) is not next(iterator)
+
+
def test_read_multiple_containers(tmp_path):
path = tmp_path / "test_append.h5"
hillas_parameter_container = HillasParametersContainer(
@@ -88,9 +95,7 @@ def test_read_multiple_containers(tmp_path):
# test reading both containers separately
with HDF5TableReader(path) as reader:
- generator = reader.read(
- "/dl1/params", HillasParametersContainer(), prefixes=True
- )
+ generator = reader.read("/dl1/params", HillasParametersContainer, prefixes=True)
hillas = next(generator)
for value, read_value in zip(
hillas_parameter_container.as_dict().values(), hillas.as_dict().values()
@@ -98,7 +103,7 @@ def test_read_multiple_containers(tmp_path):
np.testing.assert_equal(value, read_value)
with HDF5TableReader(path) as reader:
- generator = reader.read("/dl1/params", LeakageContainer(), prefixes=True)
+ generator = reader.read("/dl1/params", LeakageContainer, prefixes=True)
leakage = next(generator)
for value, read_value in zip(
leakage_container.as_dict().values(), leakage.as_dict().values()
@@ -109,7 +114,7 @@ def test_read_multiple_containers(tmp_path):
with HDF5TableReader(path) as reader:
generator = reader.read(
"/dl1/params",
- [HillasParametersContainer(), LeakageContainer()],
+ (HillasParametersContainer, LeakageContainer),
prefixes=True,
)
hillas_, leakage_ = next(generator)
@@ -140,7 +145,7 @@ def test_read_without_prefixes(tmp_path):
)
with HDF5TableWriter(path, group_name="dl1", add_prefix=False) as writer:
- writer.write("params", [hillas_parameter_container, leakage_container])
+ writer.write("params", (hillas_parameter_container, leakage_container))
df = pd.read_hdf(path, key="/dl1/params")
assert "fov_lon" in df.columns
@@ -150,7 +155,7 @@ def test_read_without_prefixes(tmp_path):
with HDF5TableReader(path) as reader:
generator = reader.read(
"/dl1/params",
- [HillasParametersContainer(), LeakageContainer()],
+ (HillasParametersContainer, LeakageContainer),
prefixes=False,
)
hillas_, leakage_ = next(generator)
@@ -169,8 +174,8 @@ def test_read_without_prefixes(tmp_path):
with HDF5TableReader(path) as reader:
generator = reader.read(
"/dl1/params",
- [HillasParametersContainer(prefix=""), LeakageContainer(prefix="")],
- prefixes=True,
+ (HillasParametersContainer, LeakageContainer),
+ prefixes=["", ""],
)
hillas_, leakage_ = next(generator)
@@ -213,7 +218,7 @@ def test_read_duplicated_container_types(tmp_path):
with HDF5TableReader(path) as reader:
generator = reader.read(
"/dl1/params",
- [HillasParametersContainer(), HillasParametersContainer()],
+ (HillasParametersContainer, HillasParametersContainer),
prefixes=["hillas_1", "hillas_2"],
)
hillas_1, hillas_2 = next(generator)
@@ -241,7 +246,7 @@ def test_custom_prefix(tmp_path):
with HDF5TableReader(path) as reader:
generator = reader.read(
- "/dl1/params", HillasParametersContainer(), prefixes="custom"
+ "/dl1/params", HillasParametersContainer, prefixes="custom"
)
read_container = next(generator)
assert isinstance(read_container, HillasParametersContainer)
@@ -257,7 +262,7 @@ def test_units(tmp_path):
class WithUnits(Container):
inverse_length = Field(5 / u.m, "foo")
time = Field(1 * u.s, "bar", unit=u.s)
- grammage = Field(2 * u.g / u.cm ** 2, "baz", unit=u.g / u.cm ** 2)
+ grammage = Field(2 * u.g / u.cm**2, "baz", unit=u.g / u.cm**2)
c = WithUnits()
@@ -300,7 +305,7 @@ class C(Container):
c = C()
with HDF5TableReader(path) as reader:
- c_reader = reader.read("/test/c", c)
+ c_reader = reader.read("/test/c", C)
for i in range(2):
cur = next(c_reader)
expected = (i % 2) == 0
@@ -317,30 +322,26 @@ class C(Container):
exps = [15, 31, 63]
with HDF5TableWriter(path, "test") as writer:
for exp in exps:
- c = C(value=2 ** exp - 1)
+ c = C(value=2**exp - 1)
writer.write("c", c)
c = C()
with HDF5TableReader(path) as reader:
- c_reader = reader.read("/test/c", c)
+ c_reader = reader.read("/test/c", C)
for exp in exps:
cur = next(c_reader)
- assert cur.value == 2 ** exp - 1
+ assert cur.value == 2**exp - 1
def test_read_container(test_h5_file):
- r0tel1 = R0CameraContainer()
- r0tel2 = R0CameraContainer()
- sim_shower = SimulatedShowerContainer()
-
with HDF5TableReader(test_h5_file) as reader:
# get the generators for each table
# test supplying a single container as well as an
# iterable with one entry only
- simtab = reader.read("/R0/sim_shower", (sim_shower,))
- r0tab1 = reader.read("/R0/tel_001", r0tel1)
- r0tab2 = reader.read("/R0/tel_002", r0tel2)
+ simtab = reader.read("/R0/sim_shower", (SimulatedShowerContainer,))
+ r0tab1 = reader.read("/R0/tel_001", R0CameraContainer)
+ r0tab2 = reader.read("/R0/tel_002", R0CameraContainer)
# read all 3 tables in sync
for _ in range(3):
@@ -359,10 +360,9 @@ def test_read_container(test_h5_file):
def test_read_whole_table(test_h5_file):
- sim_shower = SimulatedShowerContainer()
with HDF5TableReader(test_h5_file) as reader:
- for cont in reader.read("/R0/sim_shower", sim_shower):
+ for cont in reader.read("/R0/sim_shower", SimulatedShowerContainer):
print(cont)
@@ -400,13 +400,11 @@ def test_reader_closes_file(test_h5_file):
def test_with_context_reader(test_h5_file):
- sim_shower = SimulatedShowerContainer()
-
with HDF5TableReader(test_h5_file) as h5_table:
assert h5_table._h5file.isopen == 1
- for cont in h5_table.read("/R0/sim_shower", sim_shower):
+ for cont in h5_table.read("/R0/sim_shower", SimulatedShowerContainer):
print(cont)
assert h5_table._h5file.isopen == 0
@@ -455,24 +453,24 @@ def test_append_mode(tmp_path):
class ContainerA(Container):
a = Field(int)
- a = ContainerA(a=1)
+ container = ContainerA(a=1)
# First open with 'w' mode to clear the file and add a Container
with HDF5TableWriter(path, "group") as h5:
- h5.write("table_1", a)
+ h5.write("table_1", container)
# Try to append A again
with HDF5TableWriter(path, "group", mode="a") as h5:
- h5.write("table_2", a)
+ h5.write("table_2", container)
# Check if file has two tables with a = 1
with HDF5TableReader(path) as h5:
- for a in h5.read("/group/table_1", ContainerA()):
- assert a.a == 1
+ for container in h5.read("/group/table_1", ContainerA):
+ assert container.a == 1
- for a in h5.read("/group/table_2", ContainerA()):
- assert a.a == 1
+ for container in h5.read("/group/table_2", ContainerA):
+ assert container.a == 1
def test_write_to_any_location(tmp_path):
@@ -482,20 +480,20 @@ def test_write_to_any_location(tmp_path):
class ContainerA(Container):
a = Field(0, "some integer field")
- a = ContainerA(a=1)
+ container = ContainerA(a=1)
with HDF5TableWriter(path, group_name=loc + "/group_1") as h5:
for _ in range(5):
- h5.write("table", a)
- h5.write("deeper/table2", a)
+ h5.write("table", container)
+ h5.write("deeper/table2", container)
with HDF5TableReader(path) as h5:
- for a in h5.read("/" + loc + "/group_1/table", ContainerA()):
- assert a.a == 1
+ for container in h5.read(f"/{loc}/group_1/table", ContainerA):
+ assert container.a == 1
with HDF5TableReader(path) as h5:
- for a in h5.read("/" + loc + "/group_1/deeper/table2", ContainerA()):
- assert a.a == 1
+ for container in h5.read(f"/{loc}/group_1/deeper/table2", ContainerA):
+ assert container.a == 1
class WithNormalEnum(Container):
@@ -532,7 +530,7 @@ def create_stream(n_event):
with HDF5TableReader(tmp_file, mode="r") as h5_table:
for group_name in ["data/"]:
group_name = "/{}table".format(group_name)
- for data in h5_table.read(group_name, WithNormalEnum()):
+ for data in h5_table.read(group_name, WithNormalEnum):
assert isinstance(data.event_type, WithNormalEnum.EventType)
@@ -564,7 +562,7 @@ def create_stream(n_event):
with HDF5TableReader(tmp_file, mode="r") as h5_table:
for group_name in ["data/"]:
group_name = "/{}table".format(group_name)
- for data in h5_table.read(group_name, WithIntEnum()):
+ for data in h5_table.read(group_name, WithIntEnum):
assert isinstance(data.event_type, WithIntEnum.EventType)
@@ -596,13 +594,13 @@ class SomeContainer(Container):
# check that we get back the transformed values (note here a round trip will
# not work, as there is no inverse transform in this test)
with HDF5TableReader(tmp_file, mode="r") as reader:
- data = next(reader.read("/data/mytable", SomeContainer()))
+ data = next(reader.read("/data/mytable", SomeContainer))
assert data.hillas_x is None
assert data.hillas_y is None
assert data.impact_x == 15
assert data.impact_y == 15
- data = next(reader.read("/data/anothertable", SomeContainer()))
+ data = next(reader.read("/data/anothertable", SomeContainer))
assert data.hillas_x is None
assert data.hillas_y is None
assert data.impact_x is None
@@ -610,7 +608,7 @@ class SomeContainer(Container):
def test_column_transforms(tmp_path):
- """ ensure a user-added column transform is applied """
+ """ensure a user-added column transform is applied"""
from ctapipe.containers import NAN_TIME
from ctapipe.io.tableio import FixedPointColumnTransform
@@ -634,7 +632,7 @@ class SomeContainer(Container):
# check that we get a length-3 array when reading back
with HDF5TableReader(tmp_file, mode="r") as reader:
- data = next(reader.read("/data/mytable", SomeContainer()))
+ data = next(reader.read("/data/mytable", SomeContainer))
assert data.current.value == 1e6
assert data.current.unit == u.uA
assert isinstance(data.time, Time)
@@ -644,7 +642,7 @@ class SomeContainer(Container):
def test_fixed_point_column_transform(tmp_path):
- """ ensure a user-added column transform is applied """
+ """ensure a user-added column transform is applied"""
from ctapipe.io.tableio import FixedPointColumnTransform
tmp_file = tmp_path / "test_column_transforms.hdf5"
@@ -669,8 +667,8 @@ class SomeContainer(Container):
writer.write("unsigned", cont)
with HDF5TableReader(tmp_file, mode="r") as reader:
- signed = next(reader.read("/data/signed", SomeContainer()))
- unsigned = next(reader.read("/data/unsigned", SomeContainer()))
+ signed = next(reader.read("/data/signed", SomeContainer))
+ unsigned = next(reader.read("/data/unsigned", SomeContainer))
for data in (signed, unsigned):
# check we get our original nans back
@@ -680,7 +678,7 @@ class SomeContainer(Container):
def test_column_transforms_regexps(tmp_path):
- """ ensure a user-added column transform is applied when given as a regexp"""
+ """ensure a user-added column transform is applied when given as a regexp"""
tmp_file = tmp_path / "test_column_transforms.hdf5"
@@ -704,11 +702,11 @@ class SomeContainer(Container):
# check that we get back the transformed values (note here a round trip will
# not work, as there is no inverse transform in this test)
with HDF5TableReader(tmp_file, mode="r") as reader:
- data = next(reader.read("/data/mytable", SomeContainer()))
+ data = next(reader.read("/data/mytable", SomeContainer))
assert data.hillas_x == 10
assert data.hillas_y == 10
- data = next(reader.read("/data/anothertable", SomeContainer()))
+ data = next(reader.read("/data/anothertable", SomeContainer))
assert data.hillas_x == 1
assert data.hillas_y == 10
@@ -727,7 +725,7 @@ class TimeContainer(Container):
writer.write("table", container)
with HDF5TableReader(tmp_file, mode="r") as reader:
- for data in reader.read("/data/table", TimeContainer()):
+ for data in reader.read("/data/table", TimeContainer):
assert isinstance(data.time, Time)
assert data.time.scale == "tai"
assert data.time.format == "mjd"
@@ -765,7 +763,7 @@ class TestContainer(Container):
def test_column_order_single_container(tmp_path):
- """ Test that columns are written in the order the containers define them"""
+ """Test that columns are written in the order the containers define them"""
path = tmp_path / "test.h5"
class Container1(Container):
@@ -782,7 +780,7 @@ class Container1(Container):
def test_column_order_multiple_containers(tmp_path):
- """ Test that columns are written in the order the containers define them"""
+ """Test that columns are written in the order the containers define them"""
path = tmp_path / "test.h5"
class Container1(Container):
@@ -885,7 +883,7 @@ class Container2(Container):
# test this also works with table reader
with HDF5TableReader(path) as reader:
- generator = reader.read("/strings", Container2())
+ generator = reader.read("/strings", Container2)
for string in expected:
c = next(generator)
assert c.string == string
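
Editorial note: the hunks above switch `HDF5TableReader.read` from taking a container *instance* to taking the container *class*. A minimal sketch of the new calling convention, assuming a hypothetical `ExampleContainer` and a throw-away file name (`example.h5`) that are not part of this patch:

    from ctapipe.core import Container, Field
    from ctapipe.io import HDF5TableReader, HDF5TableWriter


    class ExampleContainer(Container):
        # hypothetical container, used only for this illustration
        value = Field(0, "an integer value")


    with HDF5TableWriter("example.h5", group_name="data") as writer:
        for i in range(3):
            writer.write("table", ExampleContainer(value=i))

    with HDF5TableReader("example.h5") as reader:
        # pass the class itself; read() yields filled instances
        for row in reader.read("/data/table", ExampleContainer):
            print(row.value)
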
diff --git a/ctapipe/io/tests/test_hdf5eventsource.py b/ctapipe/io/tests/test_hdf5eventsource.py
index a8634e79985..d3d98f83223 100644
--- a/ctapipe/io/tests/test_hdf5eventsource.py
+++ b/ctapipe/io/tests/test_hdf5eventsource.py
@@ -136,4 +136,18 @@ def test_read_r1(r1_hdf5_file):
pass
assert e is not None
- assert e.count == 4
+ assert e.count == 3
+
+
+def test_trigger_allowed_tels(dl1_proton_file):
+ with HDF5EventSource(
+ input_url=dl1_proton_file, allowed_tels={1, 2, 3, 4, 5, 10}
+ ) as s:
+        i = 0
+ for i, e in enumerate(s):
+ assert e.count == i
+ assert set(e.trigger.tels_with_trigger) == e.trigger.tel.keys()
+ assert len(e.trigger.tels_with_trigger) > 1
+
+ assert i == 1
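
Editorial note: for context, a small sketch of the behaviour this new test exercises. With `allowed_tels`, `tels_with_trigger` is restricted to the selected telescopes and stays consistent with the per-telescope trigger data. The input path below is a placeholder for any DL1 file written by ctapipe, not a file shipped with this patch:

    from ctapipe.io import HDF5EventSource

    # placeholder path for a DL1 file produced by ctapipe-process
    path = "events.dl1.h5"

    with HDF5EventSource(input_url=path, allowed_tels={1, 2, 3, 4}) as source:
        for event in source:
            # only the allowed telescopes survive the selection
            assert set(event.trigger.tels_with_trigger) == event.trigger.tel.keys()
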
diff --git a/ctapipe/io/tests/test_prod2.py b/ctapipe/io/tests/test_prod2.py
index 200d2406014..98be8be0eb5 100644
--- a/ctapipe/io/tests/test_prod2.py
+++ b/ctapipe/io/tests/test_prod2.py
@@ -9,7 +9,10 @@
def test_eventio_prod2():
with pytest.warns(UnknownPixelShapeWarning):
- with SimTelEventSource(input_url=dataset) as reader:
+ with SimTelEventSource(
+ input_url=dataset,
+ focal_length_choice='nominal',
+ ) as reader:
for event in reader:
if event.count == 2:
break
diff --git a/ctapipe/io/tests/test_simteleventsource.py b/ctapipe/io/tests/test_simteleventsource.py
index d94a09c61b4..77eba9e8d12 100644
--- a/ctapipe/io/tests/test_simteleventsource.py
+++ b/ctapipe/io/tests/test_simteleventsource.py
@@ -2,8 +2,8 @@
from ctapipe.instrument.camera.geometry import UnknownPixelShapeWarning
import numpy as np
-from astropy.utils.data import download_file
import astropy.units as u
+from astropy.coordinates import Angle
from itertools import zip_longest
import pytest
from astropy.time import Time
@@ -20,18 +20,21 @@
gamma_test_large_path = get_dataset_path("gamma_test_large.simtel.gz")
gamma_test_path = get_dataset_path("gamma_test.simtel.gz")
calib_events_path = get_dataset_path("lst_prod3_calibration_and_mcphotons.simtel.zst")
+prod5b_path = get_dataset_path("gamma_20deg_0deg_run2___cta-prod5-paranal_desert-2147m-Paranal-dark_cone10-100evts.simtel.zst")
def test_positional_input():
- source = SimTelEventSource(gamma_test_large_path)
- assert source.input_url == Path(gamma_test_large_path)
+ source = SimTelEventSource(prod5b_path)
+ assert source.input_url == Path(prod5b_path)
def test_simtel_event_source_on_gamma_test_one_event():
+ assert SimTelEventSource.is_compatible(gamma_test_large_path)
+
with SimTelEventSource(
- input_url=gamma_test_large_path, back_seekable=True
+ input_url=gamma_test_large_path, back_seekable=True,
+ focal_length_choice='nominal',
) as reader:
- assert reader.is_compatible(gamma_test_large_path)
assert not reader.is_stream
for event in reader:
@@ -47,7 +50,7 @@ def test_simtel_event_source_on_gamma_test_one_event():
def test_that_event_is_not_modified_after_loop():
- dataset = gamma_test_large_path
+ dataset = prod5b_path
with SimTelEventSource(input_url=dataset, max_events=2) as source:
for event in source:
last_event = copy.deepcopy(event)
@@ -61,12 +64,11 @@ def test_that_event_is_not_modified_after_loop():
def test_additional_meta_data_from_simulation_config():
- with SimTelEventSource(input_url=gamma_test_large_path) as reader:
- next(iter(reader))
-
- # for expectation values
- from astropy import units as u
- from astropy.coordinates import Angle
+ with SimTelEventSource(
+ input_url=gamma_test_large_path,
+ focal_length_choice='nominal',
+ ) as reader:
+ pass
# There should be only one observation
assert len(reader.obs_ids) == 1
@@ -104,7 +106,10 @@ def test_additional_meta_data_from_simulation_config():
def test_properties():
- source = SimTelEventSource(input_url=gamma_test_large_path)
+ source = SimTelEventSource(
+ input_url=gamma_test_large_path,
+ focal_length_choice='nominal',
+ )
assert source.is_simulation
assert source.datalevels == (DataLevel.R0, DataLevel.R1)
@@ -112,11 +117,14 @@ def test_properties():
assert source.simulation_config[7514].corsika_version == 6990
-def test_gamma_file():
+def test_gamma_file_prod2():
dataset = gamma_test_path
with pytest.warns(UnknownPixelShapeWarning):
- with SimTelEventSource(input_url=dataset) as reader:
+ with SimTelEventSource(
+ input_url=dataset,
+ focal_length_choice='nominal',
+ ) as reader:
assert reader.is_compatible(dataset)
assert reader.is_stream # using gzip subprocess makes it a stream
@@ -134,7 +142,9 @@ def test_gamma_file():
def test_max_events():
max_events = 5
with SimTelEventSource(
- input_url=gamma_test_large_path, max_events=max_events
+ input_url=gamma_test_large_path,
+ max_events=max_events,
+ focal_length_choice='nominal',
) as reader:
count = 0
for _ in reader:
@@ -143,7 +153,11 @@ def test_max_events():
def test_pointing():
- with SimTelEventSource(input_url=gamma_test_large_path, max_events=3) as reader:
+ with SimTelEventSource(
+ input_url=gamma_test_large_path,
+ max_events=3,
+ focal_length_choice='nominal',
+ ) as reader:
for e in reader:
assert np.isclose(e.pointing.array_altitude.to_value(u.deg), 70)
assert np.isclose(e.pointing.array_azimuth.to_value(u.deg), 0)
@@ -160,7 +174,8 @@ def test_allowed_telescopes():
# test that the allowed_tels mask works:
allowed_tels = {3, 4}
with SimTelEventSource(
- input_url=gamma_test_large_path, allowed_tels=allowed_tels, max_events=5
+ input_url=gamma_test_large_path, allowed_tels=allowed_tels, max_events=5,
+ focal_length_choice='nominal',
) as reader:
assert not allowed_tels.symmetric_difference(reader.subarray.tel_ids)
for event in reader:
@@ -188,7 +203,8 @@ def test_calibration_events():
EventType.SUBARRAY,
]
with SimTelEventSource(
- input_url=calib_events_path, skip_calibration_events=False
+ input_url=calib_events_path, skip_calibration_events=False,
+ focal_length_choice='nominal',
) as reader:
for event, expected_type in zip_longest(reader, expected_types):
@@ -197,7 +213,10 @@ def test_calibration_events():
def test_trigger_times():
- source = SimTelEventSource(input_url=calib_events_path)
+ source = SimTelEventSource(
+ input_url=calib_events_path,
+ focal_length_choice='nominal',
+ )
t0 = Time("2020-05-06T15:30:00")
t1 = Time("2020-05-06T15:40:00")
@@ -209,7 +228,10 @@ def test_trigger_times():
def test_true_image():
- with SimTelEventSource(input_url=calib_events_path) as reader:
+ with SimTelEventSource(
+ input_url=calib_events_path,
+ focal_length_choice='nominal',
+ ) as reader:
for event in reader:
for tel in event.simulation.tel.values():
@@ -218,7 +240,10 @@ def test_true_image():
def test_instrument():
"""Test if same telescope types share a single instance of CameraGeometry"""
- source = SimTelEventSource(input_url=gamma_test_large_path)
+ source = SimTelEventSource(
+ input_url=gamma_test_large_path,
+ focal_length_choice='nominal',
+ )
subarray = source.subarray
assert subarray.tel[1].optics.num_mirrors == 1
@@ -278,39 +303,36 @@ def test_apply_simtel_r1_calibration_2_channel():
assert r1_waveforms[1, 0] == (r0_waveforms[0, 1, 0] - ped[0, 1]) * dc_to_pe[0, 1]
-def test_effective_focal_length():
- test_file_url = (
- "https://github.com/cta-observatory/pyeventio/raw/master/tests"
- "/resources/prod4_pixelsettings_v3.gz"
- )
- test_file = download_file(test_file_url)
+def test_focal_length_choice():
+ # this file does not contain the effective focal length
+ with pytest.raises(RuntimeError):
+ SimTelEventSource(gamma_test_large_path)
- focal_length_nominal = 0
- focal_length_effective = 0
+ with pytest.raises(RuntimeError):
+ SimTelEventSource(gamma_test_large_path, focal_length_choice='effective')
- with SimTelEventSource(
- input_url=test_file, focal_length_choice="nominal"
- ) as source:
- subarray = source.subarray
- focal_length_nominal = subarray.tel[1].optics.equivalent_focal_length
+ s = SimTelEventSource(gamma_test_large_path, focal_length_choice='nominal')
+ assert s.subarray.tel[1].optics.equivalent_focal_length == 28 * u.m
- with SimTelEventSource(
- input_url=test_file, focal_length_choice="effective"
- ) as source:
- subarray = source.subarray
- focal_length_effective = subarray.tel[1].optics.equivalent_focal_length
+    # this file does contain the effective focal length
+ s = SimTelEventSource(prod5b_path, focal_length_choice='effective')
+ assert u.isclose(s.subarray.tel[1].optics.equivalent_focal_length, 29.3 * u.m, atol=0.05 * u.m)
+    # check that guessing the telescope name is not affected by the focal length choice
+ assert str(s.subarray.tel[1]) == 'LST_LST_LSTCam'
+
+ s = SimTelEventSource(prod5b_path, focal_length_choice='nominal')
+ assert u.isclose(s.subarray.tel[1].optics.equivalent_focal_length, 28.0 * u.m, atol=0.05 * u.m)
+    # check that guessing the telescope name is not affected by the focal length choice
+ assert str(s.subarray.tel[1]) == 'LST_LST_LSTCam'
- assert focal_length_nominal > 0
- assert focal_length_effective > 0
- assert focal_length_nominal != focal_length_effective
def test_only_config():
config = Config()
- config.SimTelEventSource.input_url = gamma_test_large_path
+ config.SimTelEventSource.input_url = prod5b_path
s = SimTelEventSource(config=config)
- assert s.input_url == Path(gamma_test_large_path).absolute()
+ assert s.input_url == Path(prod5b_path).absolute()
def test_calibscale_and_calibshift(prod5_gamma_simtel_path):
@@ -355,11 +377,17 @@ def test_calibscale_and_calibshift(prod5_gamma_simtel_path):
def test_true_image_sum():
# this file does not contain true pe info
- with SimTelEventSource(gamma_test_large_path) as s:
+ with SimTelEventSource(
+ gamma_test_large_path,
+ focal_length_choice='nominal',
+ ) as s:
e = next(iter(s))
assert np.all(np.isnan(sim.true_image_sum) for sim in e.simulation.tel.values())
- with SimTelEventSource(calib_events_path) as s:
+ with SimTelEventSource(
+ calib_events_path,
+ focal_length_choice='nominal',
+ ) as s:
e = next(iter(s))
true_image_sums = {}
@@ -371,8 +399,10 @@ def test_true_image_sum():
# check it also works with allowed_tels, since the values
# are stored in a flat array in simtel
- with SimTelEventSource(calib_events_path, allowed_tels={2, 3}) as s:
+ with SimTelEventSource(
+ calib_events_path, allowed_tels={2, 3},
+ focal_length_choice='nominal',
+ ) as s:
e = next(iter(s))
-
assert e.simulation.tel[2].true_image_sum == true_image_sums[2]
assert e.simulation.tel[3].true_image_sum == true_image_sums[3]
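
Editorial note: taken together, these test changes document the new `focal_length_choice` behaviour of `SimTelEventSource`: older files that lack the effective focal length now require an explicit choice, while prod5b files support both. A minimal sketch, assuming the test files referenced above are available via `get_dataset_path`:

    import astropy.units as u
    from ctapipe.io import SimTelEventSource
    from ctapipe.utils import get_dataset_path

    old_file = get_dataset_path("gamma_test_large.simtel.gz")
    prod5b_file = get_dataset_path(
        "gamma_20deg_0deg_run2___cta-prod5-paranal_desert-2147m-Paranal-dark_cone10-100evts.simtel.zst"
    )

    # old files lack the effective focal length, so the choice must be explicit
    with SimTelEventSource(old_file, focal_length_choice="nominal") as source:
        print(source.subarray.tel[1].optics.equivalent_focal_length)

    # prod5b files contain both, so the effective focal length can be requested
    with SimTelEventSource(prod5b_file, focal_length_choice="effective") as source:
        assert u.isclose(
            source.subarray.tel[1].optics.equivalent_focal_length,
            29.3 * u.m,
            atol=0.05 * u.m,
        )
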
diff --git a/ctapipe/reco/tests/test_HillasReconstructor.py b/ctapipe/reco/tests/test_HillasReconstructor.py
index 7ee4f1ad0c5..0b594cab952 100644
--- a/ctapipe/reco/tests/test_HillasReconstructor.py
+++ b/ctapipe/reco/tests/test_HillasReconstructor.py
@@ -197,11 +197,11 @@ def test_reconstruction_against_simulation(subarray_and_event_gamma_off_axis_500
)
def test_CameraFrame_against_TelescopeFrame(filename):
- input_file = get_dataset_path(
- "gamma_divergent_LaPalma_baseline_20Zd_180Az_prod3_test.simtel.gz"
- )
+    input_file = get_dataset_path(filename)
- source = SimTelEventSource(input_file, max_events=10)
+ source = SimTelEventSource(input_file, max_events=10, focal_length_choice="nominal")
     # too few events survive for this test with the default quality criteria,
# use less restrictive ones
diff --git a/ctapipe/reco/tests/test_reconstruction_methods.py b/ctapipe/reco/tests/test_reconstruction_methods.py
index 1989e3962e8..ec99c7488bd 100644
--- a/ctapipe/reco/tests/test_reconstruction_methods.py
+++ b/ctapipe/reco/tests/test_reconstruction_methods.py
@@ -30,7 +30,7 @@ def test_reconstructors(reconstructors):
"gamma_LaPalma_baseline_20Zd_180Az_prod3b_test.simtel.gz"
)
- source = EventSource(filename, max_events=10)
+ source = EventSource(filename, max_events=10, focal_length_choice="nominal")
subarray = source.subarray
calib = CameraCalibrator(source.subarray)
image_processor = ImageProcessor(source.subarray)
diff --git a/ctapipe/tools/merge.py b/ctapipe/tools/merge.py
index e1e64cb8c69..d458fa32e4c 100644
--- a/ctapipe/tools/merge.py
+++ b/ctapipe/tools/merge.py
@@ -257,10 +257,10 @@ def setup(self):
self.log.info("Merging simulated data")
self.is_simulation = True
- # do not try to merge optional nodes not present in first file
- for node in filter(lambda n: n not in h5file, optional_nodes):
- self.log.info(f"First file does not contain {node}, ignoring")
- self.usable_nodes.remove(node)
+ for node in optional_nodes:
+ if node in self.usable_nodes and node not in h5file:
+ self.log.info(f"First file does not contain {node}, ignoring")
+ self.usable_nodes.remove(node)
# create output file with subarray from first file
self.first_subarray = SubarrayDescription.from_hdf(self.input_files[0])
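
Editorial note: the rewritten loop only removes a node that is still scheduled for merging and absent from the first file, which avoids calling `remove` on a node that was already excluded earlier in setup. A tiny stand-alone sketch of the guarded removal; the node names and sets are illustrative, not the tool's real values:

    # illustrative stand-in values, not the merge tool's real node names
    optional_nodes = {"/simulation/images", "/dl1/monitoring"}
    usable_nodes = {"/dl1/event", "/simulation/images"}
    first_file_nodes = {"/dl1/event"}  # nodes actually present in the first file

    for node in optional_nodes:
        # only drop nodes we still plan to merge and that are missing
        if node in usable_nodes and node not in first_file_nodes:
            print(f"First file does not contain {node}, ignoring")
            usable_nodes.remove(node)
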
diff --git a/ctapipe/tools/tests/test_process.py b/ctapipe/tools/tests/test_process.py
index 394089063cf..c72e18e9d7f 100644
--- a/ctapipe/tools/tests/test_process.py
+++ b/ctapipe/tools/tests/test_process.py
@@ -210,6 +210,7 @@ def test_stage_2_from_simtel(tmp_path):
f"--output={output}",
"--max-events=5",
"--overwrite",
+ "--SimTelEventSource.focal_length_choice=nominal",
],
cwd=tmp_path,
)
@@ -285,6 +286,7 @@ def test_training_from_simtel(tmp_path):
f"--output={output}",
"--max-events=5",
"--overwrite",
+ "--SimTelEventSource.focal_length_choice=nominal",
],
cwd=tmp_path,
)
diff --git a/ctapipe/tools/tests/test_tools.py b/ctapipe/tools/tests/test_tools.py
index f906213b40d..ebf74a751a1 100644
--- a/ctapipe/tools/tests/test_tools.py
+++ b/ctapipe/tools/tests/test_tools.py
@@ -13,6 +13,7 @@
GAMMA_TEST_LARGE = get_dataset_path("gamma_test_large.simtel.gz")
LST_MUONS = get_dataset_path("lst_muons.simtel.zst")
+PROD5B_PATH = get_dataset_path("gamma_20deg_0deg_run2___cta-prod5-paranal_desert-2147m-Paranal-dark_cone10-100evts.simtel.zst")
def test_muon_reconstruction_simtel(tmp_path):
@@ -25,6 +26,7 @@ def test_muon_reconstruction_simtel(tmp_path):
argv=[
f"--input={LST_MUONS}",
f"--output={muon_simtel_output_file}",
+ "--SimTelEventSource.focal_length_choice=nominal",
"--overwrite",
],
cwd=tmp_path,
@@ -71,7 +73,12 @@ def test_display_dl1(tmp_path, dl1_image_file, dl1_parameters_file):
# test simtel
assert (
run_tool(
- DisplayDL1Calib(), argv=["--max-events=1", "--telescope=11"], cwd=tmp_path
+ DisplayDL1Calib(),
+ argv=[
+ "--max-events=1", "--telescope=11",
+ "--SimTelEventSource.focal_length_choice=nominal",
+ ],
+ cwd=tmp_path
)
== 0
)
@@ -125,7 +132,7 @@ def test_dump_triggers(tmp_path):
sys.argv = ["dump_triggers"]
outfile = tmp_path / "triggers.fits"
- tool = DumpTriggersTool(infile=GAMMA_TEST_LARGE, outfile=str(outfile))
+ tool = DumpTriggersTool(infile=PROD5B_PATH, outfile=str(outfile))
assert run_tool(tool, cwd=tmp_path) == 0
@@ -139,18 +146,18 @@ def test_dump_instrument(tmp_path):
sys.argv = ["dump_instrument"]
tool = DumpInstrumentTool()
- assert run_tool(tool, [f"--input={GAMMA_TEST_LARGE}"], cwd=tmp_path) == 0
+ assert run_tool(tool, [f"--input={PROD5B_PATH}"], cwd=tmp_path) == 0
assert (tmp_path / "FlashCam.camgeom.fits.gz").exists()
assert (
- run_tool(tool, [f"--input={GAMMA_TEST_LARGE}", "--format=ecsv"], cwd=tmp_path)
+ run_tool(tool, [f"--input={PROD5B_PATH}", "--format=ecsv"], cwd=tmp_path)
== 0
)
assert (tmp_path / "MonteCarloArray.optics.ecsv").exists()
assert (
- run_tool(tool, [f"--input={GAMMA_TEST_LARGE}", "--format=hdf5"], cwd=tmp_path)
+ run_tool(tool, [f"--input={PROD5B_PATH}", "--format=hdf5"], cwd=tmp_path)
== 0
)
assert (tmp_path / "subarray.h5").exists()
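
Editorial note: the `--SimTelEventSource.focal_length_choice=nominal` flag added throughout these tests works for any tool that reads simtel files. A hedged sketch driving `ctapipe-process` through `run_tool`, as the tests do; the import locations and the output file name are assumptions for illustration:

    from ctapipe.core import run_tool
    from ctapipe.tools.process import ProcessorTool
    from ctapipe.utils import get_dataset_path

    input_file = get_dataset_path("gamma_test_large.simtel.gz")

    # equivalent to running `ctapipe-process` with the same flags on the shell
    ret = run_tool(
        ProcessorTool(),
        argv=[
            f"--input={input_file}",
            "--output=events.dl1.h5",  # illustrative output name
            "--max-events=5",
            "--SimTelEventSource.focal_length_choice=nominal",
            "--overwrite",
        ],
    )
    assert ret == 0
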
diff --git a/ctapipe/utils/datasets.py b/ctapipe/utils/datasets.py
index 42f187f2b82..71ba61485ed 100644
--- a/ctapipe/utils/datasets.py
+++ b/ctapipe/utils/datasets.py
@@ -28,7 +28,7 @@
__all__ = ["get_dataset_path", "find_in_path", "find_all_matching_datasets"]
-DEFAULT_URL = "http://cccta-dataserver.in2p3.fr/data/ctapipe-extra/v0.3.3/"
+DEFAULT_URL = "http://cccta-dataserver.in2p3.fr/data/ctapipe-extra/v0.3.4/"
def get_searchpath_dirs(searchpath=os.getenv("CTAPIPE_SVC_PATH"), url=DEFAULT_URL):
diff --git a/ctapipe/visualization/mpl_array.py b/ctapipe/visualization/mpl_array.py
index 5d895ec8d86..1c3b3e2c456 100644
--- a/ctapipe/visualization/mpl_array.py
+++ b/ctapipe/visualization/mpl_array.py
@@ -62,10 +62,12 @@ def __init__(
self.frame = frame
self.subarray = subarray
+ self.axes = axes or plt.gca()
# get the telescope positions. If a new frame is set, this will
# transform to the new frame.
- self.tel_coords = subarray.tel_coords.transform_to(frame)
+ self.tel_coords = subarray.tel_coords.transform_to(frame).cartesian
+ self.unit = self.tel_coords.x.unit
# set up colors per telescope type
tel_types = [str(tel) for tel in subarray.tels.values()]
@@ -76,6 +78,10 @@ def __init__(
for tel in subarray.tel.values()
]
+ self.radii = radius
+ else:
+ self.radii = np.ones(len(tel_types)) * radius
+
if title is None:
title = subarray.name
@@ -116,15 +122,22 @@ def __init__(
)
plt.legend(handles=legend_elements)
+ self.add_radial_grid()
+
+ # create the plot
self.tel_colors = tel_color
self.autoupdate = autoupdate
self.telescopes = PatchCollection(patches, match_original=True)
self.telescopes.set_linewidth(2.0)
- self.axes = axes or plt.gca()
self.axes.add_collection(self.telescopes)
self.axes.set_aspect(1.0)
self.axes.set_title(title)
+ xunit = self.tel_coords.x.unit.to_string("latex")
+ yunit = self.tel_coords.y.unit.to_string("latex")
+ xname, yname, _ = frame.get_representation_component_names().keys()
+ self.axes.set_xlabel(f"{xname} [{xunit}] $\\rightarrow$")
+ self.axes.set_ylabel(f"{yname} [{yunit}] $\\rightarrow$")
self._labels = []
self._quiver = None
self.axes.autoscale_view()
@@ -136,12 +149,50 @@ def values(self):
@values.setter
def values(self, values):
- """ set the telescope colors to display """
+ """set the telescope colors to display"""
self.telescopes.set_array(np.ma.masked_invalid(values))
self._update()
+ def add_radial_grid(self, spacing=100 * u.m):
+ """add some dotted rings for distance estimation. The number of rings
+ is estimated automatically from the spacing and the array footprint.
+
+ Parameters
+ ----------
+ spacing: Quantity
+ spacing between rings
+
+ """
+
+ n_circles = np.round(
+ (np.sqrt(self.subarray.footprint / np.pi) / spacing).to_value(""),
+ 0,
+ )
+ circle_radii = np.arange(1, n_circles + 2, 1) * spacing.to_value(self.unit)
+ circle_patches = PatchCollection(
+ [
+ Circle(
+ xy=(0, 0),
+ radius=r,
+ fill=False,
+ fc="none",
+ linestyle="dotted",
+ color="gray",
+ alpha=0.1,
+ lw=1,
+ )
+ for r in circle_radii
+ ],
+ color="#eeeeee",
+ ls="dotted",
+ fc="none",
+ lw=3,
+ )
+
+ self.axes.add_collection(circle_patches)
+
def set_vector_uv(self, uu, vv, c=None, **kwargs):
- """ sets the vector field U,V and color for all telescopes
+ """sets the vector field U,V and color for all telescopes
Parameters
----------
@@ -289,9 +340,17 @@ def set_line_hillas(self, hillas_dict, core_dict, range, **kwargs):
def add_labels(self):
px = self.tel_coords.x.to_value("m")
py = self.tel_coords.y.to_value("m")
- for tel, x, y in zip(self.subarray.tels, px, py):
+ for tel, x, y, r in zip(self.subarray.tels, px, py, self.radii):
name = str(tel)
- lab = self.axes.text(x, y, name, fontsize=8, clip_on=True)
+ lab = self.axes.text(
+ x,
+ y - r * 1.8,
+ name,
+ fontsize=8,
+ clip_on=True,
+ horizontalalignment="center",
+ verticalalignment="top",
+ )
self._labels.append(lab)
def remove_labels(self):
@@ -300,7 +359,7 @@ def remove_labels(self):
self._labels = []
def _update(self):
- """ signal a redraw if necessary """
+ """signal a redraw if necessary"""
if self.autoupdate:
plt.draw()
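
Editorial note: a short usage sketch of the updated `ArrayDisplay`: the radial grid is drawn automatically on construction, telescope labels are now centred below each telescope, and the axis labels carry the frame's component names and units. The input file name follows the notebooks below; treat the spacing value as an illustrative choice:

    import astropy.units as u
    import matplotlib.pyplot as plt

    from ctapipe.io import EventSource
    from ctapipe.utils import get_dataset_path
    from ctapipe.visualization import ArrayDisplay

    with EventSource(get_dataset_path("gamma_prod5.simtel.zst")) as source:
        subarray = source.subarray

    disp = ArrayDisplay(subarray)            # draws a 100 m radial grid by default
    disp.add_radial_grid(spacing=250 * u.m)  # optionally add coarser rings on top
    disp.add_labels()                        # telescope names below each telescope
    plt.show()
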
diff --git a/docs/examples/InstrumentDescription.ipynb b/docs/examples/InstrumentDescription.ipynb
index f70a901ce53..dbefaa51c48 100644
--- a/docs/examples/InstrumentDescription.ipynb
+++ b/docs/examples/InstrumentDescription.ipynb
@@ -21,8 +21,7 @@
"from ctapipe.io import EventSource\n",
"import numpy as np\n",
"\n",
- "#filename = get_dataset_path(\"gamma_test_large.simtel.gz\") # try this one as well\n",
- "filename = get_dataset_path(\"gamma_test_large.simtel.gz\") \n",
+ "filename = get_dataset_path(\"gamma_prod5.simtel.zst\") \n",
"\n",
"with EventSource(filename, max_events=1) as source:\n",
" subarray = source.subarray"
@@ -359,7 +358,7 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -373,7 +372,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.7.10"
+ "version": "3.8.13"
}
},
"nbformat": 4,
diff --git a/docs/examples/containers_with_enums_and_table_writer.ipynb b/docs/examples/containers_with_enums_and_table_writer.ipynb
deleted file mode 100644
index 5bd23e96eeb..00000000000
--- a/docs/examples/containers_with_enums_and_table_writer.ipynb
+++ /dev/null
@@ -1,212 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# containers_with_enums_and_table_writer\n"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## Create some example Containers"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "import enum\n",
- "from ctapipe.io import HDF5TableWriter\n",
- "from ctapipe.core import Container, Field\n",
- "from astropy import units as u\n",
- "import numpy as np"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "class WithEnum(Container):\n",
- " \n",
- " # this class could also be defined in global namespace \n",
- " # outside this container, but this looks a bit tidier.\n",
- " # both variants work however\n",
- " class EventType(enum.Enum):\n",
- " pedestal = 1\n",
- " physics = 2\n",
- " calibration = 3\n",
- " \n",
- " event_type = Field(\n",
- " EventType.calibration, \n",
- " f'type of event, one of: {list(EventType.__members__.keys())}'\n",
- " )"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "let's also make a dummy stream (generator) that will create a series of these containers"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "def create_stream(n_event):\n",
- " data = WithEnum()\n",
- " for i in range(n_event):\n",
- " data.event_type = WithEnum.EventType(i % 3 + 1)\n",
- " yield data"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "for data in create_stream(3):\n",
- " for key, val in data.items():\n",
- " print('{}: {}, type : {}'.format(key, val, type(val)))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## Writing the Data"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "with HDF5TableWriter('container.h5', group_name='data') as h5_table:\n",
- " for data in create_stream(10):\n",
- " h5_table.write('table', data)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "!ls container.h5"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "## Reading the Data"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "import pandas as pd\n",
- "\n",
- "data = pd.read_hdf('container.h5', key='/data/table')\n",
- "data.head()"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Reading with PyTables"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "import tables\n",
- "h5 = tables.open_file('container.h5')\n",
- "table = h5.root['data']['table']\n",
- "table"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "table.attrs"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from ctapipe.io import HDF5TableReader\n",
- "\n",
- "def read(mode):\n",
- " \n",
- " print('reading mode {}'.format(mode))\n",
- "\n",
- " with HDF5TableReader('container.h5', mode=mode) as h5_table:\n",
- "\n",
- " for group_name in ['data/']:\n",
- "\n",
- " group_name = '/{}table'.format(group_name)\n",
- " print(group_name)\n",
- "\n",
- " for data in h5_table.read(group_name, WithEnum()):\n",
- "\n",
- " print(data.as_dict())"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "read('r')"
- ]
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.7.2"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/docs/examples/index.rst b/docs/examples/index.rst
index 910d5e11e12..978d437425d 100644
--- a/docs/examples/index.rst
+++ b/docs/examples/index.rst
@@ -25,4 +25,3 @@ the Tutorials section for more complete examples)
Tools
provenance
table_writer_reader
- containers_with_enums_and_table_writer
diff --git a/docs/examples/table_writer_reader.ipynb b/docs/examples/table_writer_reader.ipynb
index aa279843bbb..dfc97778ddd 100644
--- a/docs/examples/table_writer_reader.ipynb
+++ b/docs/examples/table_writer_reader.ipynb
@@ -78,7 +78,7 @@
" data.a_bool = (i % 2) == 0\n",
" data.a_np_int = np.int64(i)\n",
" data.a_np_float = np.float64(i)\n",
- " data.a_np_bool = np.bool((i % 2) == 0)\n",
+ " data.a_np_bool = np.bool_((i % 2) == 0)\n",
" \n",
" yield data"
]
@@ -107,103 +107,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "### How not to do it:"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "h5_table = HDF5TableWriter('container.h5', group_name='data')\n",
- "\n",
- "for data in create_stream(10):\n",
- " \n",
- " h5_table.write('table', data)\n",
- "\n",
- "h5_table.close()"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "In that case the file is not garenteed to close properly for instance if one does a mistake in the for loop. Let's just add a stupid mistake and see what happens."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "try:\n",
- " h5_table = HDF5TableWriter('container.h5', group_name='data')\n",
- "\n",
- " for data in create_stream(10):\n",
- "\n",
- " h5_table.write('table', data)\n",
- " 0/0 # cause an error\n",
- " \n",
- " h5_table.close()\n",
- "except Exception as err:\n",
- " print(\"FAILED!\", err)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Now the file did not close properly. So let's try to correct the mistake and execute the code again."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "try:\n",
- " h5_table = HDF5TableWriter('container.h5', group_name='data')\n",
- "\n",
- " for data in create_stream(10):\n",
- "\n",
- " h5_table.write('table', data)\n",
- " 0/0 # cause an error\n",
- " h5_table.close()\n",
- "except Exception as err:\n",
- " print(\"FAILED!\", err)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Ah it seems that the file did not close! Now I am stuck. Maybe I should restart the kernel? ahh no I don't want to loose everything. Can I just close it ?"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "h5_table.close()"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "It worked!"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "### Better to use context management!"
+ "Always use context managers with IO classes, as they will make sure the underlying resources are properly closed in case of errors:"
]
},
{
@@ -221,7 +125,9 @@
" 0/0\n",
"except Exception as err:\n",
" print(\"FAILED:\", err)\n",
- "print('Done')"
+ "print('Done')\n",
+ "\n",
+ "h5_table.h5file.isopen == False"
]
},
{
@@ -356,6 +262,38 @@
"For other TableWriter implementations, others may be possible (depending on format)\n"
]
},
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### Reading using `ctapipe.io.read_table`\n",
+ "\n",
+ "This is the preferred method, it returns an astropy `Table` and supports keeping track of\n",
+ "units, metadata and transformations."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from ctapipe.io import read_table\n",
+ "\n",
+ "\n",
+ "table = read_table('container.h5', '/data_0/table')\n",
+ "table[:5]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "table.meta"
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {},
@@ -447,7 +385,7 @@
" group_name = '/{}table'.format(group_name)\n",
" print(group_name)\n",
"\n",
- " for data in h5_table.read(group_name, VariousTypesContainer()):\n",
+ " for data in h5_table.read(group_name, VariousTypesContainer):\n",
"\n",
" print(data.as_dict())"
]
@@ -498,7 +436,7 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -512,7 +450,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.8.8"
+ "version": "3.8.13"
}
},
"nbformat": 4,
diff --git a/docs/tutorials/calibrated_data_exploration.ipynb b/docs/tutorials/calibrated_data_exploration.ipynb
index d2c5bddf28d..725d29d84a1 100644
--- a/docs/tutorials/calibrated_data_exploration.ipynb
+++ b/docs/tutorials/calibrated_data_exploration.ipynb
@@ -48,7 +48,7 @@
"metadata": {},
"outputs": [],
"source": [
- "filename = get_dataset_path(\"gamma_test.simtel.gz\")\n",
+ "filename = get_dataset_path(\"gamma_prod5.simtel.zst\")\n",
"source = EventSource(filename, max_events=2)\n",
"\n",
"for event in source:\n",
@@ -85,7 +85,9 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "scrolled": false
+ },
"outputs": [],
"source": [
"print(event.r1)"
@@ -351,11 +353,18 @@
"ad.values = hit_pattern\n",
"ad.add_labels()\n"
]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
}
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -369,7 +378,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.7.10"
+ "version": "3.8.13"
}
},
"nbformat": 4,
diff --git a/docs/tutorials/coordinates_example.ipynb b/docs/tutorials/coordinates_example.ipynb
index 6f6d2d47042..429efc7ed15 100644
--- a/docs/tutorials/coordinates_example.ipynb
+++ b/docs/tutorials/coordinates_example.ipynb
@@ -90,11 +90,11 @@
},
"outputs": [],
"source": [
- "filename = get_dataset_path(\"gamma_test_large.simtel.gz\")\n",
- "source = EventSource(filename, max_events=4)\n",
+ "filename = get_dataset_path(\"gamma_prod5.simtel.zst\")\n",
+ "source = EventSource(filename)\n",
"\n",
"events = [copy.deepcopy(event) for event in source]\n",
- "event = events[3]\n",
+ "event = events[4]\n",
"\n",
"layout = set(source.subarray.tel_ids) "
]
@@ -123,8 +123,8 @@
},
"outputs": [],
"source": [
- "print(f'Telescope with data: {event.r0.tel.keys()}')\n",
- "tel_id = 2"
+ "print(f'Telescope with data: {event.r1.tel.keys()}')\n",
+ "tel_id = 3"
]
},
{
@@ -620,7 +620,7 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -634,7 +634,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.7.10"
+ "version": "3.8.13"
}
},
"nbformat": 4,
diff --git a/docs/tutorials/ctapipe_handson.ipynb b/docs/tutorials/ctapipe_handson.ipynb
index d9ce8fd3760..68115dfd9f5 100644
--- a/docs/tutorials/ctapipe_handson.ipynb
+++ b/docs/tutorials/ctapipe_handson.ipynb
@@ -35,7 +35,7 @@
"metadata": {},
"outputs": [],
"source": [
- "path = utils.get_dataset_path(\"gamma_test_large.simtel.gz\")"
+ "path = utils.get_dataset_path(\"gamma_prod5.simtel.zst\")"
]
},
{
@@ -44,7 +44,7 @@
"metadata": {},
"outputs": [],
"source": [
- "source = EventSource(path, max_events=4)\n",
+ "source = EventSource(path, max_events=5)\n",
"\n",
"for event in source:\n",
" print(event.count, event.index.event_id, event.simulation.shower.energy)"
@@ -65,7 +65,7 @@
"metadata": {},
"outputs": [],
"source": [
- "event.r0"
+ "event.r1"
]
},
{
@@ -74,8 +74,8 @@
"metadata": {},
"outputs": [],
"source": [
- "for event in EventSource(path, max_events=4):\n",
- " print(event.count, event.r0.tel.keys())"
+ "for event in EventSource(path, max_events=5):\n",
+ " print(event.count, event.r1.tel.keys())"
]
},
{
@@ -84,7 +84,7 @@
"metadata": {},
"outputs": [],
"source": [
- "event.r0.tel[2]"
+ "event.r0.tel[3]"
]
},
{
@@ -366,7 +366,7 @@
"metadata": {},
"outputs": [],
"source": [
- "for event in EventSource(path, max_events=4):\n",
+ "for event in EventSource(path, max_events=5):\n",
" calib(event) # fills in r1, dl0, and dl1\n",
" print(event.dl1.tel.keys())"
]
@@ -480,8 +480,8 @@
"disp = CameraDisplay(tel.camera.geometry, image=cleaned)\n",
"disp.cmap = plt.cm.coolwarm\n",
"disp.add_colorbar()\n",
- "plt.xlim(-1.0,0)\n",
- "plt.ylim(0,1.0)"
+ "plt.xlim(0.5, 1.0)\n",
+ "plt.ylim(-1.0, 0.0)"
]
},
{
@@ -503,8 +503,8 @@
"disp = CameraDisplay(tel.camera.geometry, image=cleaned)\n",
"disp.cmap = plt.cm.coolwarm\n",
"disp.add_colorbar()\n",
- "plt.xlim(-1.0,0)\n",
- "plt.ylim(0,1.0)\n",
+ "plt.xlim(0.5, 1.0)\n",
+ "plt.ylim(-1.0, 0.0)\n",
"disp.overlay_moments(params, color='white', lw=2)"
]
},
@@ -557,8 +557,8 @@
"metadata": {},
"outputs": [],
"source": [
- "data = utils.get_dataset_path(\"gamma_test_large.simtel.gz\") \n",
- "source = EventSource(data, allowed_tels=[1,2,3,4], max_events=10) # remove the max_events limit to get more stats"
+ "data = utils.get_dataset_path(\"gamma_prod5.simtel.zst\") \n",
+ "source = EventSource(data) # remove the max_events limit to get more stats"
]
},
{
@@ -573,7 +573,8 @@
" for tel_id, tel_data in event.dl1.tel.items():\n",
" tel = source.subarray.tel[tel_id]\n",
" mask = tailcuts_clean(tel.camera.geometry, tel_data.image)\n",
- " params = hillas_parameters(tel.camera.geometry[mask], tel_data.image[mask])"
+ " if np.count_nonzero(mask) > 0:\n",
+ " params = hillas_parameters(tel.camera.geometry[mask], tel_data.image[mask])"
]
},
{
@@ -645,20 +646,13 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "If you do this yourself, loop over more events to get better statistics"
+ "If you do this yourself, chose a larger file to loop over more events to get better statistics"
]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
}
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -672,7 +666,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.7.8"
+ "version": "3.8.13"
}
},
"nbformat": 4,
diff --git a/docs/tutorials/ctapipe_overview.ipynb b/docs/tutorials/ctapipe_overview.ipynb
index 214d384df18..268e7dd69cd 100644
--- a/docs/tutorials/ctapipe_overview.ipynb
+++ b/docs/tutorials/ctapipe_overview.ipynb
@@ -1,1113 +1,1161 @@
{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {}
- },
- "source": ["# Analyzing Events Using ctapipe"]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {},
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "\n",
- "\n",
- "
\n",
- "\n",
- "\n",
- "
Initially presented @ LST Analysis Bootcamp
\n",
- "\n",
- "
Padova, 26.11.2018
\n",
- "\n",
- "
Maximilian Nöthe (@maxnoe) & Kai A. Brügge (@mackaiver)
\n",
- "\n",
- "
"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {},
- "slideshow": {
- "slide_type": "skip"
- }
- },
- "outputs": [],
- "source": [
- "import matplotlib.pyplot as plt\n",
- "import numpy as np\n",
- "\n",
- "%matplotlib inline"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {},
- "slideshow": {
- "slide_type": "skip"
- }
- },
- "outputs": [],
- "source": [
- "plt.rcParams['figure.figsize'] = (12, 8)\n",
- "plt.rcParams['font.size'] = 14\n",
- "plt.rcParams['figure.figsize']"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {},
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "Table of Contents
\n",
- ""
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {},
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": ["## General Information"]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {},
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "### Design\n",
- "\n",
- "* DL0 → DL3 analysis\n",
- "\n",
- "* Currently some R0 → DL2 code to be able to analyze simtel files\n",
- "\n",
- "* ctapipe is built upon the Scientific Python Stack, core dependencies are\n",
- " * numpy\n",
- " * scipy\n",
- " * astropy\n",
- " * numba"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {},
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "### Developement\n",
- "\n",
- "* ctapipe is developed as Open Source Software (Currently under MIT License) at \n",
- "\n",
- "* We use the \"Github-Workflow\": \n",
- " * Few people (e.g. @kosack, @maxnoe) have write access to the main repository\n",
- " * Contributors fork the main repository and work on branches\n",
- " * Pull Requests are merged after Code Review and automatic execution of the test suite\n",
- "\n",
- "* Early developement stage ⇒ backwards-incompatible API changes might and will happen \n"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {},
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "### What's there?\n",
- "\n",
- "* Reading simtel simulation files\n",
- "* Simple calibration, cleaning and feature extraction functions\n",
- "* Camera and Array plotting\n",
- "* Coordinate frames and transformations \n",
- "* Stereo-reconstruction using line intersections\n",
- " \n",
- " "
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {},
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "### What's still missing?\n",
- "\n",
- "* Good integration with machine learning techniques\n",
- "* IRF calculation \n",
- "* Documentation, e.g. formal definitions of coordinate frames "
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {},
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": [
- "### What can you do?\n",
- "\n",
- "* Report issues\n",
- " * Hard to get started? Tell us where you are stuck\n",
- " * Tell user stories\n",
- " * Missing features\n",
- "\n",
- "* Start contributing\n",
- " * ctapipe needs more workpower\n",
- " * Implement new reconstruction features"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {},
- "slideshow": {
- "slide_type": "slide"
- }
- },
- "source": ["## A simple hillas analysis"]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {}
- },
- "source": ["### Reading in simtel files"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "from ctapipe.io import EventSource\n",
- "from ctapipe.utils.datasets import get_dataset_path\n",
- "\n",
- "input_url = get_dataset_path('gamma_test_large.simtel.gz')\n",
- "\n",
- "# EventSource() automatically detects what kind of file we are giving it,\n",
- "# if already supported by ctapipe\n",
- "source = EventSource(input_url, max_events=49)\n",
- "\n",
- "print(type(source))"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "for event in source:\n",
- " print('Id: {}, E = {:1.3f}, Telescopes: {}'.format(event.count, event.simulation.shower.energy, len(event.r0.tel)))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {}
- },
- "source": [
- "Each event is a `DataContainer` holding several `Field`s of data, which can be containers or just numbers.\n",
- "Let's look a one event:"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": ["event"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": ["source.subarray.camera_types"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": ["len(event.r0.tel), len(event.r1.tel)"]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {}
- },
- "source": [
- "### Data calibration\n",
- "\n",
- "The `CameraCalibrator` calibrates the event (obtaining the `dl1` images)."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "from ctapipe.calib import CameraCalibrator\n",
- "\n",
- "calibrator = CameraCalibrator(subarray=source.subarray)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": ["calibrator(event)"]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {}
- },
- "source": [
- "### Event displays\n",
- "\n",
- "Let's use ctapipe's plotting facilities to plot the telescope images"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": ["event.dl1.tel.keys()"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": ["tel_id = 4"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "geometry = source.subarray.tel[tel_id].camera.geometry\n",
- "dl1 = event.dl1.tel[tel_id]\n",
- "\n",
- "geometry, dl1"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": ["dl1.image"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "from ctapipe.visualization import CameraDisplay\n",
- "\n",
- "display = CameraDisplay(geometry)\n",
- "\n",
- "# right now, there might be one image per gain channel.\n",
- "# This will change as soon as \n",
- "display.image = dl1.image\n",
- "display.add_colorbar()"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {}
- },
- "source": ["### Image Cleaning"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": ["from ctapipe.image.cleaning import tailcuts_clean"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "# unoptimized cleaning levels, copied from \n",
- "# https://github.com/tudo-astroparticlephysics/cta_preprocessing\n",
- "cleaning_level = {\n",
- " 'ASTRICam': (5, 7, 2), # (5, 10)?\n",
- " 'LSTCam': (3.5, 7.5, 2), # ?? (3, 6) for Abelardo...\n",
- " 'FlashCam': (4, 8, 2), # there is some scaling missing?\n",
- "}"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "boundary, picture, min_neighbors = cleaning_level[geometry.camera_name]\n",
- "\n",
- "clean = tailcuts_clean(\n",
- " geometry, \n",
- " dl1.image,\n",
- " boundary_thresh=boundary,\n",
- " picture_thresh=picture,\n",
- " min_number_picture_neighbors=min_neighbors\n",
- ")"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5))\n",
- "\n",
- "d1 = CameraDisplay(geometry, ax=ax1)\n",
- "d2 = CameraDisplay(geometry, ax=ax2)\n",
- "\n",
- "ax1.set_title('Image')\n",
- "d1.image = dl1.image\n",
- "d1.add_colorbar(ax=ax1)\n",
- "\n",
- "ax2.set_title('Pulse Time')\n",
- "d2.image = dl1.peak_time - np.average(dl1.peak_time, weights=dl1.image)\n",
- "d2.cmap = 'RdBu_r'\n",
- "d2.add_colorbar(ax=ax2)\n",
- "d2.set_limits_minmax(-20,20)\n",
- "\n",
- "d1.highlight_pixels(clean, color='red', linewidth=1)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {}
- },
- "source": ["### Image Parameters"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "from ctapipe.image import hillas_parameters, leakage_parameters, concentration_parameters\n",
- "from ctapipe.image import timing_parameters\n",
- "from ctapipe.image import number_of_islands\n",
- "from ctapipe.image import camera_to_shower_coordinates"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "hillas = hillas_parameters(geometry[clean], dl1.image[clean])\n",
- "\n",
- "print(hillas)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "display = CameraDisplay(geometry)\n",
- "\n",
- "# set \"unclean\" pixels to 0\n",
- "cleaned = dl1.image.copy()\n",
- "cleaned[~clean] = 0.0\n",
- "\n",
- "display.image = cleaned\n",
- "display.add_colorbar()\n",
- "\n",
- "display.overlay_moments(hillas, color='xkcd:red')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "timing = timing_parameters(\n",
- " geometry,\n",
- " dl1.image,\n",
- " dl1.peak_time,\n",
- " hillas,\n",
- " clean\n",
- ")\n",
- "\n",
- "print(timing)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "long, trans = camera_to_shower_coordinates(\n",
- " geometry.pix_x, geometry.pix_y,hillas.x, hillas.y, hillas.psi\n",
- ")\n",
- "\n",
- "plt.plot(long[clean], dl1.peak_time[clean], 'o')\n",
- "plt.plot(long[clean], timing.slope * long[clean] + timing.intercept)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "l = leakage_parameters(geometry, dl1.image, clean)\n",
- "print(l)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "disp = CameraDisplay(geometry)\n",
- "disp.image = dl1.image\n",
- "disp.highlight_pixels(geometry.get_border_pixel_mask(1), linewidth=2, color='xkcd:red')"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "n_islands, island_id = number_of_islands(geometry, clean)\n",
- "\n",
- "print(n_islands)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "conc = concentration_parameters(geometry, dl1.image, hillas)\n",
- "print(conc)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {}
- },
- "source": [
- "### Putting it all together / Stereo reconstruction\n",
- "\n",
- "\n",
- "All these steps are now unified in several components configurable through the config system, mainly:\n",
- "\n",
- "* CameraCalibrator for DL0 → DL1 (Images)\n",
- "* ImageProcessor for DL1 (Images) → DL1 (Parameters)\n",
- "* ShowerProcessor for stereo reconstruction of the shower geometry\n",
- "* DataWriter for writing data into HDF5\n",
- "\n",
- "A command line tool doing these steps and writing out data in HDF5 format is available as `ctapipe-process`"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {},
- "scrolled": false
- },
- "outputs": [],
- "source": [
- "import astropy.units as u\n",
- "from astropy.coordinates import SkyCoord, AltAz\n",
- "\n",
- "from ctapipe.containers import ImageParametersContainer\n",
- "from ctapipe.io import EventSource\n",
- "from ctapipe.utils.datasets import get_dataset_path\n",
- "\n",
- "from ctapipe.calib import CameraCalibrator\n",
- "\n",
- "from ctapipe.image import ImageProcessor\n",
- "from ctapipe.reco import ShowerProcessor\n",
- "\n",
- "from ctapipe.io import DataWriter\n",
- "\n",
- "from copy import deepcopy\n",
- "import tempfile\n",
- "\n",
- "from traitlets.config import Config\n",
- "\n",
- "\n",
- "image_processor_config = Config({\n",
- " \"ImageProcessor\": {\n",
- " \"image_cleaner_type\": \"TailcutsImageCleaner\",\n",
- " \"TailcutsImageCleaner\": {\n",
- " \"picture_threshold_pe\": [\n",
- " (\"type\", \"LST_LST_LSTCam\", 7.5),\n",
- " (\"type\", \"MST_MST_FlashCam\", 8),\n",
- " (\"type\", \"SST_ASTRI_ASTRICam\", 7),\n",
- " ],\n",
- " \"boundary_threshold_pe\": [\n",
- " (\"type\", \"LST_LST_LSTCam\", 5),\n",
- " (\"type\", \"MST_MST_FlashCam\", 4),\n",
- " (\"type\", \"SST_ASTRI_ASTRICam\", 4),\n",
- " ]\n",
- " \n",
- " }\n",
- " }\n",
- "})\n",
- "\n",
- "input_url = get_dataset_path('gamma_test_large.simtel.gz')\n",
- "source = EventSource(input_url)\n",
- "\n",
- "calibrator = CameraCalibrator(subarray=source.subarray)\n",
- "image_processor = ImageProcessor(subarray=source.subarray, config=image_processor_config)\n",
- "shower_processor = ShowerProcessor(subarray=source.subarray)\n",
- "horizon_frame = AltAz()\n",
- "\n",
- "f = tempfile.NamedTemporaryFile(suffix='.hdf5')\n",
- "\n",
- "with DataWriter(source, output_path=f.name, overwrite=True, write_dl2=True) as writer:\n",
- " \n",
- " for event in source:\n",
- " energy = event.simulation.shower.energy\n",
- " n_telescopes_r1 = len(event.r1.tel)\n",
- " event_id = event.index.event_id\n",
- " print(f'Id: {event_id}, E = {energy:1.3f}, Telescopes (R1): {n_telescopes_r1}')\n",
- " \n",
- " calibrator(event)\n",
- " image_processor(event)\n",
- " shower_processor(event)\n",
- " \n",
- " stereo = event.dl2.stereo.geometry[\"HillasReconstructor\"]\n",
- " if stereo.is_valid:\n",
- " print(' Alt: {:.2f}°'.format(stereo.alt.deg))\n",
- " print(' Az: {:.2f}°'.format(stereo.az.deg))\n",
- " print(' Hmax: {:.0f}'.format(stereo.h_max))\n",
- " print(' CoreX: {:.1f}'.format(stereo.core_x))\n",
- " print(' CoreY: {:.1f}'.format(stereo.core_y))\n",
- " print(' Multiplicity: {:d}'.format(len(stereo.tel_ids)))\n",
- " \n",
- " # save a nice event for plotting later\n",
- " if event.count == 3:\n",
- " plotting_event = deepcopy(event)\n",
- " \n",
- " writer(event)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "from astropy.coordinates.angle_utilities import angular_separation\n",
- "import pandas as pd\n",
- "\n",
- "from ctapipe.io import TableLoader\n",
- "\n",
- "loader = TableLoader(f.name, load_dl2_geometry=True, load_simulated=True)\n",
- "\n",
- "events = loader.read_subarray_events()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "theta = angular_separation(\n",
- " events[\"HillasReconstructor_az\"].quantity, events[\"HillasReconstructor_alt\"].quantity,\n",
- " events[\"true_az\"].quantity, events[\"true_alt\"].quantity\n",
- ")\n",
- "\n",
- "plt.hist(theta.to_value(u.deg)**2, bins=25, range=[0, 0.3])\n",
- "plt.xlabel(r'$\\theta² / deg²$')\n",
- "None"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {}
- },
- "source": ["## ArrayDisplay\n"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "from ctapipe.visualization import ArrayDisplay\n",
- "\n",
- "\n",
- "angle_offset = plotting_event.pointing.array_azimuth\n",
- "\n",
- "plotting_hillas = {\n",
- " tel_id: dl1.parameters.hillas\n",
- " for tel_id, dl1 in plotting_event.dl1.tel.items()\n",
- "}\n",
- "\n",
- "plotting_core = {\n",
- " tel_id: dl1.parameters.core.psi\n",
- " for tel_id, dl1 in plotting_event.dl1.tel.items()\n",
- "}\n",
- "\n",
- "\n",
- "disp = ArrayDisplay(source.subarray)\n",
- "\n",
- "disp.set_line_hillas(plotting_hillas, plotting_core, 500)\n",
- "\n",
- "plt.scatter(\n",
- " plotting_event.simulation.shower.core_x, plotting_event.simulation.shower.core_y,\n",
- " s=200, c='k', marker='x', label='True Impact',\n",
- ")\n",
- "plt.scatter(\n",
- " plotting_event.dl2.stereo.geometry[\"HillasReconstructor\"].core_x,\n",
- " plotting_event.dl2.stereo.geometry[\"HillasReconstructor\"].core_y,\n",
- " s=200, c='r', marker='x', label='Estimated Impact',\n",
- ")\n",
- "\n",
- "plt.legend()\n",
- "plt.xlim(-400, 400)\n",
- "plt.ylim(-400, 400)"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {}
- },
- "source": ["### Reading the LST dl1 data\n"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "loader = TableLoader(f.name, load_simulated=True, load_dl1_parameters=True)\n",
- "\n",
- "dl1_table = loader.read_telescope_events([\"LST_LST_LSTCam\"])"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "plt.scatter(\n",
- " np.log10(dl1_table[\"true_energy\"].quantity / u.TeV),\n",
- " np.log10(dl1_table[\"hillas_intensity\"]),\n",
- ")\n",
- "plt.xlabel('log10(E / TeV)')\n",
- "plt.ylabel('log10(intensity)')\n",
- "None"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {}
- },
- "source": [
- "## Isn't python slow?\n",
- "\n",
- "* Many of you might have heard: \"Python is slow\".\n",
- "* That's trueish.\n",
- "* All python objects are classes living on the heap, even integers.\n",
- "* Looping over lots of \"primitives\" is quite slow compared to other languages.\n",
- "\n",
- "⇒ Vectorize as much as possible using numpy \n",
- "⇒ Use existing interfaces to fast C / C++ / Fortran code \n",
- "⇒ Optimize using numba \n",
- "\n",
- "**But: \"Premature Optimization is the root of all evil\" — Donald Knuth**\n",
- "\n",
- "So profile to find exactly what is slow.\n",
- "\n",
- "### Why use python then?\n",
- "\n",
- "* Python works very well as *glue* for libraries of all kinds of languages\n",
- "* Python has a rich ecosystem for data science, physics, algorithms, astronomy\n",
- "\n",
- "### Example: Number of Islands\n",
- "\n",
- "Find all groups of pixels, that survived the cleaning"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "from ctapipe.image import toymodel\n",
- "from ctapipe.instrument import CameraGeometry\n",
- "\n",
- "\n",
- "geometry = CameraGeometry.from_name('LSTCam')"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {}
- },
- "source": ["Let's create a toy images with several islands;"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "np.random.seed(42)\n",
- "\n",
- "image = np.zeros(geometry.n_pixels)\n",
- "\n",
- "\n",
- "for i in range(9):\n",
- " \n",
- " model = toymodel.Gaussian(\n",
- " x=np.random.uniform(-0.8, 0.8) * u.m,\n",
- " y=np.random.uniform(-0.8, 0.8) * u.m,\n",
- " width=np.random.uniform(0.05, 0.075) * u.m,\n",
- " length=np.random.uniform(0.1, 0.15) * u.m,\n",
- " psi=np.random.uniform(0, 2 * np.pi) * u.rad,\n",
- " )\n",
- "\n",
- " new_image, sig, bg = model.generate_image(\n",
- " geometry, \n",
- " intensity=np.random.uniform(1000, 3000),\n",
- " nsb_level_pe=5\n",
- " )\n",
- " image += new_image"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "clean = tailcuts_clean(geometry, image, picture_thresh=10, boundary_thresh=5, min_number_picture_neighbors=2)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "disp = CameraDisplay(geometry)\n",
- "disp.image = image\n",
- "disp.highlight_pixels(clean, color='xkcd:red', linewidth=1.5)\n",
- "disp.add_colorbar()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "def num_islands_python(camera, clean):\n",
- " ''' A breadth first search to find connected islands of neighboring pixels in the cleaning set'''\n",
- " \n",
- " # the camera geometry has a [n_pixel, n_pixel] boolean array\n",
- " # that is True where two pixels are neighbors\n",
- " neighbors = camera.neighbor_matrix\n",
- " \n",
- " island_ids = np.zeros(camera.n_pixels)\n",
- " current_island = 0\n",
- " \n",
- " # a set to remember which pixels we already visited\n",
- " visited = set()\n",
- "\n",
- " # go only through the pixels, that survived cleaning\n",
- " for pix_id in np.where(clean)[0]:\n",
- " if pix_id not in visited:\n",
- " # remember that we already checked this pixel\n",
- " visited.add(pix_id)\n",
- " \n",
- " # if we land in the outer loop again, we found a new island\n",
- " current_island += 1\n",
- " island_ids[pix_id] = current_island\n",
- " \n",
- " # now check all neighbors of the current pixel recursively\n",
- " to_check = set(np.where(neighbors[pix_id] & clean)[0])\n",
- " while to_check:\n",
- " pix_id = to_check.pop()\n",
- " \n",
- " if pix_id not in visited: \n",
- " visited.add(pix_id)\n",
- " island_ids[pix_id] = current_island\n",
- " \n",
- " to_check.update(np.where(neighbors[pix_id] & clean)[0])\n",
- " \n",
- " n_islands = current_island\n",
- " return n_islands, island_ids"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": ["n_islands, island_ids = num_islands_python(geometry, clean)"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "from matplotlib.colors import ListedColormap\n",
- "\n",
- "cmap = plt.get_cmap('Paired')\n",
- "cmap = ListedColormap(cmap.colors[:n_islands])\n",
- "cmap.set_under('k')\n",
- "\n",
- "disp = CameraDisplay(geometry)\n",
- "disp.image = island_ids\n",
- "disp.cmap = cmap\n",
- "disp.set_limits_minmax(0.5, n_islands + 0.5)\n",
- "disp.add_colorbar()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": ["%timeit num_islands_python(geometry, clean)"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "from scipy.sparse.csgraph import connected_components\n",
- "\n",
- "def num_islands_scipy(geometry, clean):\n",
- " neighbors = geometry.neighbor_matrix_sparse\n",
- " \n",
- " clean_neighbors = neighbors[clean][:, clean]\n",
- " num_islands, labels = connected_components(clean_neighbors, directed=False)\n",
- " \n",
- " island_ids = np.zeros(geometry.n_pixels)\n",
- " island_ids[clean] = labels + 1\n",
- " \n",
- " return num_islands, island_ids"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "n_islands_s, island_ids_s = num_islands_scipy(geometry, clean)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": [
- "disp = CameraDisplay(geometry)\n",
- "disp.image = island_ids_s\n",
- "disp.cmap = cmap\n",
- "disp.set_limits_minmax(0.5, n_islands_s + 0.5)\n",
- "disp.add_colorbar()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "pycharm": {}
- },
- "outputs": [],
- "source": ["%timeit num_islands_scipy(geometry, clean)"]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "pycharm": {}
- },
- "source": ["**A lot less code, and a factor 3 speed improvement**"]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": ["Finally, current ctapipe implementation is using numba:"]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": ["%timeit number_of_islands(geometry, clean)"]
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {}
+ },
+ "source": [
+ "# Analyzing Events Using ctapipe"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {},
+ "slideshow": {
+ "slide_type": "slide"
+ }
+ },
+ "source": [
+ "\n",
+ "\n",
+ "
\n",
+ "\n",
+ "\n",
+ "
Initially presented @ LST Analysis Bootcamp
\n",
+ "\n",
+ "
Padova, 26.11.2018
\n",
+ "\n",
+ "
Maximilian Nöthe (@maxnoe) & Kai A. Brügge (@mackaiver)
\n",
+ "\n",
+ "
"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {},
+ "slideshow": {
+ "slide_type": "skip"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "import matplotlib.pyplot as plt\n",
+ "import numpy as np\n",
+ "\n",
+ "%matplotlib inline"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {},
+ "slideshow": {
+ "slide_type": "skip"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "plt.rcParams['figure.figsize'] = (12, 8)\n",
+ "plt.rcParams['font.size'] = 14\n",
+ "plt.rcParams['figure.figsize']"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {},
+ "slideshow": {
+ "slide_type": "slide"
+ }
+ },
+ "source": [
+ "Table of Contents
\n",
+ ""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {},
+ "slideshow": {
+ "slide_type": "slide"
+ }
+ },
+ "source": [
+ "## General Information"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {},
+ "slideshow": {
+ "slide_type": "slide"
+ }
+ },
+ "source": [
+ "### Design\n",
+ "\n",
+ "* DL0 → DL3 analysis\n",
+ "\n",
+ "* Currently some R0 → DL2 code to be able to analyze simtel files\n",
+ "\n",
+ "* ctapipe is built upon the Scientific Python Stack, core dependencies are\n",
+ " * numpy\n",
+ " * scipy\n",
+ " * astropy\n",
+ " * numba"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {},
+ "slideshow": {
+ "slide_type": "slide"
+ }
+ },
+ "source": [
+ "### Developement\n",
+ "\n",
+ "* ctapipe is developed as Open Source Software (BSD 3-Clause License) at \n",
+ "\n",
+ "* We use the \"Github-Workflow\": \n",
+ " * Few people (e.g. @kosack, @maxnoe) have write access to the main repository\n",
+ " * Contributors fork the main repository and work on branches\n",
+ " * Pull Requests are merged after Code Review and automatic execution of the test suite\n",
+ "\n",
+ "* Early developement stage ⇒ backwards-incompatible API changes might and will happen \n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {},
+ "slideshow": {
+ "slide_type": "slide"
}
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3 (ipykernel)",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.8.13"
+ },
+ "source": [
+ "### What's there?\n",
+ "\n",
+ "* Reading simtel simulation files\n",
+ "* Simple calibration, cleaning and feature extraction functions\n",
+ "* Camera and Array plotting\n",
+ "* Coordinate frames and transformations \n",
+ "* Stereo-reconstruction using line intersections\n",
+ " \n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {},
+ "slideshow": {
+ "slide_type": "slide"
+ }
+ },
+ "source": [
+ "### What's still missing?\n",
+ "\n",
+ "* Good integration with machine learning techniques\n",
+ "* IRF calculation \n",
+ "* Documentation, e.g. formal definitions of coordinate frames "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {},
+ "slideshow": {
+ "slide_type": "slide"
+ }
+ },
+ "source": [
+ "### What can you do?\n",
+ "\n",
+ "* Report issues\n",
+ " * Hard to get started? Tell us where you are stuck\n",
+ " * Tell user stories\n",
+ " * Missing features\n",
+ "\n",
+ "* Start contributing\n",
+ " * ctapipe needs more workpower\n",
+ " * Implement new reconstruction features"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {},
+ "slideshow": {
+ "slide_type": "slide"
}
+ },
+ "source": [
+ "## A simple hillas analysis"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {}
+ },
+ "source": [
+ "### Reading in simtel files"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "from ctapipe.io import EventSource\n",
+ "from ctapipe.utils.datasets import get_dataset_path\n",
+ "\n",
+ "input_url = get_dataset_path('gamma_prod5.simtel.zst')\n",
+ "\n",
+ "# EventSource() automatically detects what kind of file we are giving it,\n",
+ "# if already supported by ctapipe\n",
+ "source = EventSource(input_url, max_events=5)\n",
+ "\n",
+ "print(type(source))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "for event in source:\n",
+ " print('Id: {}, E = {:1.3f}, Telescopes: {}'.format(event.count, event.simulation.shower.energy, len(event.r0.tel)))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {}
+ },
+ "source": [
+ "Each event is a `DataContainer` holding several `Field`s of data, which can be containers or just numbers.\n",
+ "Let's look a one event:"
+ ]
+ },
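+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "For illustration (a small hand-written sketch using only fields that also appear later in this tutorial), fields are accessed as attributes and containers nest:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# illustrative only: drill down through the nested containers of the current event\n",
+ "print(event.index.event_id)\n",
+ "print(event.simulation.shower.energy)\n",
+ "print(sorted(event.r0.tel.keys()))"
+ ]
+ },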
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "event"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "source.subarray.camera_types"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "len(event.r0.tel), len(event.r1.tel)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {}
+ },
+ "source": [
+ "### Data calibration\n",
+ "\n",
+ "The `CameraCalibrator` calibrates the event (obtaining the `dl1` images)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "from ctapipe.calib import CameraCalibrator\n",
+ "\n",
+ "calibrator = CameraCalibrator(subarray=source.subarray)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "calibrator(event)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {}
+ },
+ "source": [
+ "### Event displays\n",
+ "\n",
+ "Let's use ctapipe's plotting facilities to plot the telescope images"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "event.dl1.tel.keys()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "tel_id = 130"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "geometry = source.subarray.tel[tel_id].camera.geometry\n",
+ "dl1 = event.dl1.tel[tel_id]\n",
+ "\n",
+ "geometry, dl1"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "dl1.image"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "from ctapipe.visualization import CameraDisplay\n",
+ "\n",
+ "display = CameraDisplay(geometry)\n",
+ "\n",
+ "# right now, there might be one image per gain channel.\n",
+ "# This will change as soon as \n",
+ "display.image = dl1.image\n",
+ "display.add_colorbar()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {}
+ },
+ "source": [
+ "### Image Cleaning"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "from ctapipe.image.cleaning import tailcuts_clean"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "# unoptimized cleaning levels\n",
+ "cleaning_level = {\n",
+ " 'CHEC': (2, 4, 2),\n",
+ " 'LSTCam': (3.5, 7, 2),\n",
+ " 'FlashCam': (3.5, 7, 2), \n",
+ " 'NectarCam': (4, 8, 2), \n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "boundary, picture, min_neighbors = cleaning_level[geometry.camera_name]\n",
+ "\n",
+ "clean = tailcuts_clean(\n",
+ " geometry, \n",
+ " dl1.image,\n",
+ " boundary_thresh=boundary,\n",
+ " picture_thresh=picture,\n",
+ " min_number_picture_neighbors=min_neighbors\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5))\n",
+ "\n",
+ "d1 = CameraDisplay(geometry, ax=ax1)\n",
+ "d2 = CameraDisplay(geometry, ax=ax2)\n",
+ "\n",
+ "ax1.set_title('Image')\n",
+ "d1.image = dl1.image\n",
+ "d1.add_colorbar(ax=ax1)\n",
+ "\n",
+ "ax2.set_title('Pulse Time')\n",
+ "d2.image = dl1.peak_time - np.average(dl1.peak_time, weights=dl1.image)\n",
+ "d2.cmap = 'RdBu_r'\n",
+ "d2.add_colorbar(ax=ax2)\n",
+ "d2.set_limits_minmax(-20,20)\n",
+ "\n",
+ "d1.highlight_pixels(clean, color='red', linewidth=1)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {}
+ },
+ "source": [
+ "### Image Parameters"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "from ctapipe.image import hillas_parameters, leakage_parameters, concentration_parameters\n",
+ "from ctapipe.image import timing_parameters\n",
+ "from ctapipe.image import number_of_islands\n",
+ "from ctapipe.image import camera_to_shower_coordinates"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "hillas = hillas_parameters(geometry[clean], dl1.image[clean])\n",
+ "\n",
+ "print(hillas)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "display = CameraDisplay(geometry)\n",
+ "\n",
+ "# set \"unclean\" pixels to 0\n",
+ "cleaned = dl1.image.copy()\n",
+ "cleaned[~clean] = 0.0\n",
+ "\n",
+ "display.image = cleaned\n",
+ "display.add_colorbar()\n",
+ "\n",
+ "display.overlay_moments(hillas, color='xkcd:red')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "timing = timing_parameters(\n",
+ " geometry,\n",
+ " dl1.image,\n",
+ " dl1.peak_time,\n",
+ " hillas,\n",
+ " clean\n",
+ ")\n",
+ "\n",
+ "print(timing)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "long, trans = camera_to_shower_coordinates(\n",
+ " geometry.pix_x, geometry.pix_y,hillas.x, hillas.y, hillas.psi\n",
+ ")\n",
+ "\n",
+ "plt.plot(long[clean], dl1.peak_time[clean], 'o')\n",
+ "plt.plot(long[clean], timing.slope * long[clean] + timing.intercept)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "l = leakage_parameters(geometry, dl1.image, clean)\n",
+ "print(l)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "disp = CameraDisplay(geometry)\n",
+ "disp.image = dl1.image\n",
+ "disp.highlight_pixels(geometry.get_border_pixel_mask(1), linewidth=2, color='xkcd:red')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "n_islands, island_id = number_of_islands(geometry, clean)\n",
+ "\n",
+ "print(n_islands)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "conc = concentration_parameters(geometry, dl1.image, hillas)\n",
+ "print(conc)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {}
+ },
+ "source": [
+ "### Putting it all together / Stereo reconstruction\n",
+ "\n",
+ "\n",
+ "All these steps are now unified in several components configurable through the config system, mainly:\n",
+ "\n",
+ "* CameraCalibrator for DL0 → DL1 (Images)\n",
+ "* ImageProcessor for DL1 (Images) → DL1 (Parameters)\n",
+ "* ShowerProcessor for stereo reconstruction of the shower geometry\n",
+ "* DataWriter for writing data into HDF5\n",
+ "\n",
+ "A command line tool doing these steps and writing out data in HDF5 format is available as `ctapipe-process`"
+ ]
+ },
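+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The same chain can also be run from the command line with `ctapipe-process`. The cell below is only an illustrative sketch: the option names in the commented line are assumptions, so check the tool's own help output for the actual interface."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# illustrative sketch of the command-line equivalent of the loop above;\n",
+ "# the option names in the commented line are assumptions, see the help output\n",
+ "# ctapipe-process --input gamma_prod5.simtel.zst --output events.dl2.h5 --write-dl1-images --overwrite\n",
+ "!ctapipe-process --help"
+ ]
+ },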
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {},
+ "scrolled": false
+ },
+ "outputs": [],
+ "source": [
+ "import astropy.units as u\n",
+ "from astropy.coordinates import SkyCoord, AltAz\n",
+ "\n",
+ "from ctapipe.containers import ImageParametersContainer\n",
+ "from ctapipe.io import EventSource\n",
+ "from ctapipe.utils.datasets import get_dataset_path\n",
+ "\n",
+ "from ctapipe.calib import CameraCalibrator\n",
+ "\n",
+ "from ctapipe.image import ImageProcessor\n",
+ "from ctapipe.reco import ShowerProcessor\n",
+ "\n",
+ "from ctapipe.io import DataWriter\n",
+ "\n",
+ "from copy import deepcopy\n",
+ "import tempfile\n",
+ "\n",
+ "from traitlets.config import Config\n",
+ "\n",
+ "\n",
+ "image_processor_config = Config({\n",
+ " \"ImageProcessor\": {\n",
+ " \"image_cleaner_type\": \"TailcutsImageCleaner\",\n",
+ " \"TailcutsImageCleaner\": {\n",
+ " \"picture_threshold_pe\": [\n",
+ " (\"type\", \"LST_LST_LSTCam\", 7.5),\n",
+ " (\"type\", \"MST_MST_FlashCam\", 8),\n",
+ " (\"type\", \"MST_MST_NectarCam\", 8),\n",
+ " (\"type\", \"SST_ASTRI_CHEC\", 7),\n",
+ " ],\n",
+ " \"boundary_threshold_pe\": [\n",
+ " (\"type\", \"LST_LST_LSTCam\", 5),\n",
+ " (\"type\", \"MST_MST_FlashCam\", 4),\n",
+ " (\"type\", \"MST_MST_NectarCam\", 4),\n",
+ " (\"type\", \"SST_ASTRI_CHEC\", 4),\n",
+ " ]\n",
+ " \n",
+ " }\n",
+ " }\n",
+ "})\n",
+ "\n",
+ "input_url = get_dataset_path('gamma_prod5.simtel.zst')\n",
+ "source = EventSource(input_url)\n",
+ "\n",
+ "calibrator = CameraCalibrator(subarray=source.subarray)\n",
+ "image_processor = ImageProcessor(subarray=source.subarray, config=image_processor_config)\n",
+ "shower_processor = ShowerProcessor(subarray=source.subarray)\n",
+ "horizon_frame = AltAz()\n",
+ "\n",
+ "f = tempfile.NamedTemporaryFile(suffix='.hdf5')\n",
+ "\n",
+ "with DataWriter(source, output_path=f.name, overwrite=True, write_dl2=True) as writer:\n",
+ " \n",
+ " for event in source:\n",
+ " energy = event.simulation.shower.energy\n",
+ " n_telescopes_r1 = len(event.r1.tel)\n",
+ " event_id = event.index.event_id\n",
+ " print(f'Id: {event_id}, E = {energy:1.3f}, Telescopes (R1): {n_telescopes_r1}')\n",
+ " \n",
+ " calibrator(event)\n",
+ " image_processor(event)\n",
+ " shower_processor(event)\n",
+ " \n",
+ " stereo = event.dl2.stereo.geometry[\"HillasReconstructor\"]\n",
+ " if stereo.is_valid:\n",
+ " print(' Alt: {:.2f}°'.format(stereo.alt.deg))\n",
+ " print(' Az: {:.2f}°'.format(stereo.az.deg))\n",
+ " print(' Hmax: {:.0f}'.format(stereo.h_max))\n",
+ " print(' CoreX: {:.1f}'.format(stereo.core_x))\n",
+ " print(' CoreY: {:.1f}'.format(stereo.core_y))\n",
+ " print(' Multiplicity: {:d}'.format(len(stereo.tel_ids)))\n",
+ " \n",
+ " # save a nice event for plotting later\n",
+ " if event.count == 3:\n",
+ " plotting_event = deepcopy(event)\n",
+ " \n",
+ " writer(event)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "from astropy.coordinates.angle_utilities import angular_separation\n",
+ "import pandas as pd\n",
+ "\n",
+ "from ctapipe.io import TableLoader\n",
+ "\n",
+ "loader = TableLoader(f.name, load_dl2_geometry=True, load_simulated=True)\n",
+ "\n",
+ "events = loader.read_subarray_events()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "theta = angular_separation(\n",
+ " events[\"HillasReconstructor_az\"].quantity, events[\"HillasReconstructor_alt\"].quantity,\n",
+ " events[\"true_az\"].quantity, events[\"true_alt\"].quantity\n",
+ ")\n",
+ "\n",
+ "plt.hist(theta.to_value(u.deg)**2, bins=25, range=[0, 0.3])\n",
+ "plt.xlabel(r'$\\theta² / deg²$')\n",
+ "None"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {}
+ },
+ "source": [
+ "## ArrayDisplay\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "from ctapipe.visualization import ArrayDisplay\n",
+ "\n",
+ "\n",
+ "angle_offset = plotting_event.pointing.array_azimuth\n",
+ "\n",
+ "plotting_hillas = {\n",
+ " tel_id: dl1.parameters.hillas\n",
+ " for tel_id, dl1 in plotting_event.dl1.tel.items()\n",
+ "}\n",
+ "\n",
+ "plotting_core = {\n",
+ " tel_id: dl1.parameters.core.psi\n",
+ " for tel_id, dl1 in plotting_event.dl1.tel.items()\n",
+ "}\n",
+ "\n",
+ "\n",
+ "disp = ArrayDisplay(source.subarray)\n",
+ "\n",
+ "disp.set_line_hillas(plotting_hillas, plotting_core, 500)\n",
+ "\n",
+ "plt.scatter(\n",
+ " plotting_event.simulation.shower.core_x, plotting_event.simulation.shower.core_y,\n",
+ " s=200, c='k', marker='x', label='True Impact',\n",
+ ")\n",
+ "plt.scatter(\n",
+ " plotting_event.dl2.stereo.geometry[\"HillasReconstructor\"].core_x,\n",
+ " plotting_event.dl2.stereo.geometry[\"HillasReconstructor\"].core_y,\n",
+ " s=200, c='r', marker='x', label='Estimated Impact',\n",
+ ")\n",
+ "\n",
+ "plt.legend()\n",
+ "# plt.xlim(-400, 400)\n",
+ "# plt.ylim(-400, 400)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {}
+ },
+ "source": [
+ "### Reading the LST dl1 data\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "loader = TableLoader(f.name, load_simulated=True, load_dl1_parameters=True)\n",
+ "\n",
+ "dl1_table = loader.read_telescope_events([\"LST_LST_LSTCam\"])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "plt.scatter(\n",
+ " np.log10(dl1_table[\"true_energy\"].quantity / u.TeV),\n",
+ " np.log10(dl1_table[\"hillas_intensity\"]),\n",
+ ")\n",
+ "plt.xlabel('log10(E / TeV)')\n",
+ "plt.ylabel('log10(intensity)')\n",
+ "None"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {}
+ },
+ "source": [
+ "## Isn't python slow?\n",
+ "\n",
+ "* Many of you might have heard: \"Python is slow\".\n",
+ "* That's trueish.\n",
+ "* All python objects are classes living on the heap, even integers.\n",
+ "* Looping over lots of \"primitives\" is quite slow compared to other languages.\n",
+ "\n",
+ "⇒ Vectorize as much as possible using numpy \n",
+ "⇒ Use existing interfaces to fast C / C++ / Fortran code \n",
+ "⇒ Optimize using numba \n",
+ "\n",
+ "**But: \"Premature Optimization is the root of all evil\" — Donald Knuth**\n",
+ "\n",
+ "So profile to find exactly what is slow.\n",
+ "\n",
+ "### Why use python then?\n",
+ "\n",
+ "* Python works very well as *glue* for libraries of all kinds of languages\n",
+ "* Python has a rich ecosystem for data science, physics, algorithms, astronomy\n",
+ "\n",
+ "### Example: Number of Islands\n",
+ "\n",
+ "Find all groups of pixels, that survived the cleaning"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "from ctapipe.image import toymodel\n",
+ "from ctapipe.instrument import CameraGeometry\n",
+ "\n",
+ "\n",
+ "geometry = CameraGeometry.from_name('LSTCam')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {}
+ },
+ "source": [
+ "Let's create a toy images with several islands;"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "np.random.seed(42)\n",
+ "\n",
+ "image = np.zeros(geometry.n_pixels)\n",
+ "\n",
+ "\n",
+ "for i in range(9):\n",
+ " \n",
+ " model = toymodel.Gaussian(\n",
+ " x=np.random.uniform(-0.8, 0.8) * u.m,\n",
+ " y=np.random.uniform(-0.8, 0.8) * u.m,\n",
+ " width=np.random.uniform(0.05, 0.075) * u.m,\n",
+ " length=np.random.uniform(0.1, 0.15) * u.m,\n",
+ " psi=np.random.uniform(0, 2 * np.pi) * u.rad,\n",
+ " )\n",
+ "\n",
+ " new_image, sig, bg = model.generate_image(\n",
+ " geometry, \n",
+ " intensity=np.random.uniform(1000, 3000),\n",
+ " nsb_level_pe=5\n",
+ " )\n",
+ " image += new_image"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "clean = tailcuts_clean(geometry, image, picture_thresh=10, boundary_thresh=5, min_number_picture_neighbors=2)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "disp = CameraDisplay(geometry)\n",
+ "disp.image = image\n",
+ "disp.highlight_pixels(clean, color='xkcd:red', linewidth=1.5)\n",
+ "disp.add_colorbar()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "def num_islands_python(camera, clean):\n",
+ " ''' A breadth first search to find connected islands of neighboring pixels in the cleaning set'''\n",
+ " \n",
+ " # the camera geometry has a [n_pixel, n_pixel] boolean array\n",
+ " # that is True where two pixels are neighbors\n",
+ " neighbors = camera.neighbor_matrix\n",
+ " \n",
+ " island_ids = np.zeros(camera.n_pixels)\n",
+ " current_island = 0\n",
+ " \n",
+ " # a set to remember which pixels we already visited\n",
+ " visited = set()\n",
+ "\n",
+ " # go only through the pixels, that survived cleaning\n",
+ " for pix_id in np.where(clean)[0]:\n",
+ " if pix_id not in visited:\n",
+ " # remember that we already checked this pixel\n",
+ " visited.add(pix_id)\n",
+ " \n",
+ " # if we land in the outer loop again, we found a new island\n",
+ " current_island += 1\n",
+ " island_ids[pix_id] = current_island\n",
+ " \n",
+ " # now check all neighbors of the current pixel recursively\n",
+ " to_check = set(np.where(neighbors[pix_id] & clean)[0])\n",
+ " while to_check:\n",
+ " pix_id = to_check.pop()\n",
+ " \n",
+ " if pix_id not in visited: \n",
+ " visited.add(pix_id)\n",
+ " island_ids[pix_id] = current_island\n",
+ " \n",
+ " to_check.update(np.where(neighbors[pix_id] & clean)[0])\n",
+ " \n",
+ " n_islands = current_island\n",
+ " return n_islands, island_ids"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "n_islands, island_ids = num_islands_python(geometry, clean)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "from matplotlib.colors import ListedColormap\n",
+ "\n",
+ "cmap = plt.get_cmap('Paired')\n",
+ "cmap = ListedColormap(cmap.colors[:n_islands])\n",
+ "cmap.set_under('k')\n",
+ "\n",
+ "disp = CameraDisplay(geometry)\n",
+ "disp.image = island_ids\n",
+ "disp.cmap = cmap\n",
+ "disp.set_limits_minmax(0.5, n_islands + 0.5)\n",
+ "disp.add_colorbar()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "%timeit num_islands_python(geometry, clean)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "from scipy.sparse.csgraph import connected_components\n",
+ "\n",
+ "def num_islands_scipy(geometry, clean):\n",
+ " neighbors = geometry.neighbor_matrix_sparse\n",
+ " \n",
+ " clean_neighbors = neighbors[clean][:, clean]\n",
+ " num_islands, labels = connected_components(clean_neighbors, directed=False)\n",
+ " \n",
+ " island_ids = np.zeros(geometry.n_pixels)\n",
+ " island_ids[clean] = labels + 1\n",
+ " \n",
+ " return num_islands, island_ids"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "n_islands_s, island_ids_s = num_islands_scipy(geometry, clean)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "disp = CameraDisplay(geometry)\n",
+ "disp.image = island_ids_s\n",
+ "disp.cmap = cmap\n",
+ "disp.set_limits_minmax(0.5, n_islands_s + 0.5)\n",
+ "disp.add_colorbar()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "pycharm": {}
+ },
+ "outputs": [],
+ "source": [
+ "%timeit num_islands_scipy(geometry, clean)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {}
+ },
+ "source": [
+ "**A lot less code, and a factor 3 speed improvement**"
+ ]
+ },
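+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Before timing the implementation ctapipe actually uses, here is a minimal, illustrative sketch of what \"optimize using numba\" means (this is not the ctapipe implementation): a plain python loop gets compiled just-in-time with `numba.njit`."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from numba import njit\n",
+ "\n",
+ "\n",
+ "def sum_of_squares(values):\n",
+ "    total = 0.0\n",
+ "    for value in values:\n",
+ "        total += value ** 2\n",
+ "    return total\n",
+ "\n",
+ "\n",
+ "# njit compiles the same pure-python loop to machine code on first call\n",
+ "sum_of_squares_numba = njit(sum_of_squares)\n",
+ "\n",
+ "values = np.random.normal(size=100_000)\n",
+ "sum_of_squares_numba(values)  # first call triggers the compilation\n",
+ "\n",
+ "%timeit sum_of_squares(values)\n",
+ "%timeit sum_of_squares_numba(values)"
+ ]
+ },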
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Finally, current ctapipe implementation is using numba:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%timeit number_of_islands(geometry, clean)"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3 (ipykernel)",
+ "language": "python",
+ "name": "python3"
},
- "nbformat": 4,
- "nbformat_minor": 4
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.8.13"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
}
diff --git a/docs/tutorials/raw_data_exploration.ipynb b/docs/tutorials/raw_data_exploration.ipynb
index 6c2d63207f8..884fd025696 100644
--- a/docs/tutorials/raw_data_exploration.ipynb
+++ b/docs/tutorials/raw_data_exploration.ipynb
@@ -24,11 +24,12 @@
"outputs": [],
"source": [
"from ctapipe.utils import get_dataset_path\n",
- "from ctapipe.io import EventSource, EventSeeker\n",
+ "from ctapipe.io import EventSource\n",
"from ctapipe.visualization import CameraDisplay\n",
"from ctapipe.instrument import CameraGeometry\n",
"from matplotlib import pyplot as plt\n",
"from astropy import units as u\n",
+ "\n",
"%matplotlib inline"
]
},
@@ -47,8 +48,7 @@
"metadata": {},
"outputs": [],
"source": [
- "source = EventSource(get_dataset_path(\"gamma_test_large.simtel.gz\"), max_events=100, back_seekable=True)\n",
- "seeker = EventSeeker(source)"
+ "source = EventSource(get_dataset_path(\"gamma_prod5.simtel.zst\"), max_events=5)"
]
},
{
@@ -66,8 +66,10 @@
"metadata": {},
"outputs": [],
"source": [
- "event = seeker.get_event_index(3) # get 3rd event\n",
- "event"
+ "# so we can advance through events one-by-one\n",
+ "event_iterator = iter(source)\n",
+ "\n",
+ "event = next(event_iterator)"
]
},
{
@@ -83,7 +85,7 @@
"metadata": {},
"outputs": [],
"source": [
- "print(repr(event.r0))"
+ "event.r0"
]
},
{
@@ -115,7 +117,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "note that the event has 2 telescopes in it: 38,40... Let's try the next one:"
+ "note that the event has 3 telescopes in it: Let's try the next one:"
]
},
{
@@ -124,7 +126,7 @@
"metadata": {},
"outputs": [],
"source": [
- "event = seeker.get_event_index(5) # get the next event\n",
+ "event = next(event_iterator)\n",
"print(event.r0.tel.keys())"
]
},
@@ -132,7 +134,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "now, we have a larger event with many telescopes... Let's look at the data from **CT7**:"
+ "now, we have a larger event with many telescopes... Let's look at one of them:"
]
},
{
@@ -141,7 +143,7 @@
"metadata": {},
"outputs": [],
"source": [
- "teldata = event.r0.tel[15]\n",
+ "teldata = event.r0.tel[26]\n",
"print(teldata)\n",
"teldata"
]
@@ -419,7 +421,9 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "scrolled": false
+ },
"outputs": [],
"source": [
"for tel in event.r0.tel.keys():\n",
@@ -488,7 +492,7 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
@@ -502,7 +506,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.7.8"
+ "version": "3.8.13"
}
},
"nbformat": 4,
diff --git a/docs/tutorials/theta_square.ipynb b/docs/tutorials/theta_square.ipynb
index b141c58626e..91191ae275a 100644
--- a/docs/tutorials/theta_square.ipynb
+++ b/docs/tutorials/theta_square.ipynb
@@ -73,7 +73,10 @@
},
"outputs": [],
"source": [
- "source = EventSource(\"dataset://gamma_test_large.simtel.gz\", allowed_tels={1, 2, 3, 4})\n",
+ "source = EventSource(\n",
+ " \"dataset://gamma_prod5.simtel.zst\",\n",
+ "# allowed_tels={1, 2, 3, 4},\n",
+ ")\n",
"\n",
"subarray = source.subarray\n",
"\n",
diff --git a/environment.yml b/environment.yml
index 610818ace63..0db4cbba942 100644
--- a/environment.yml
+++ b/environment.yml
@@ -29,6 +29,7 @@ dependencies:
- pytest
- pytest-cov
- pytest-runner
+ - pytest-astropy-header
- pyyaml
- scikit-learn
- scipy
diff --git a/setup.cfg b/setup.cfg
index 38bb9945741..8bc09281423 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -12,6 +12,7 @@ show_response = 1
minversion=3.0
norecursedirs=build docs/_build
addopts = -v
+astropy_header = true
[aliases]
diff --git a/setup.py b/setup.py
index 202d55827b2..8dff48a4d0f 100755
--- a/setup.py
+++ b/setup.py
@@ -25,6 +25,7 @@
"pandas>=0.24.0",
"importlib_resources;python_version<'3.9'",
"tomli",
+ "pytest_astropy_header",
]
docs_require = [
"sphinx_rtd_theme",
@@ -63,6 +64,7 @@
"importlib_resources;python_version<'3.9'",
"jinja2~=3.0.2", # for sphinx 3.5, update when moving to 4.x
"pyyaml>=5.1",
+ "docutils",
],
# here are optional dependencies (as "tag" : "dependency spec")
extras_require={