Remove deprecations, remove crs, rename ARCOERA5 #252

Merged
14 commits merged on Oct 8, 2024
12 changes: 11 additions & 1 deletion CHANGELOG.md
@@ -1,15 +1,25 @@
# Changelog

## v0.54.1 (unreleased)
## v0.54.1

### Features

- Add [CoCiP Grid notebook](https://py.contrails.org/notebooks/CoCiPGrid.html) example to documentation.
- Implement `PSFlight.eval` on a `Fleet` source.

### Breaking changes

- Remove `attrs["crs"]` usage from `GeoVectorDataset` and child classes (`Flight`, `Fleet`). All spatial data is assumed to be EPSG:4326 (WGS84). This was previously assumed implicitly, but now the `crs` attribute is removed from the `attrs` dictionary.
- Change the return type of `GeoVectorDataset.transform_crs` to a pair of numpy arrays representing `x` and `y` coordinates in the target CRS.
- Remove deprecated `MetDataset.variables` property in favor of `MetDataset.indexes`.
- Remove `**kwargs` in `MetDataArray` constructor.
- Rename `ARCOERA5` to `ERA5ARCO` for consistency with the `ERA5` and `ERA5ModelLevel` interfaces.
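
Taken together, the `crs` removal, the new `transform_crs` return type, and the `ERA5ARCO` rename amount to a small migration for downstream code. The sketch below illustrates it under stated assumptions: the toy flight data, the target CRS string, and the exact `transform_crs` call signature are illustrative, not taken from this PR.

```python
import numpy as np
import pandas as pd

from pycontrails import Flight
from pycontrails.datalib.ecmwf import ERA5ARCO  # renamed from ARCOERA5

# A toy flight; attrs no longer carries a "crs" entry, EPSG:4326 is assumed.
df = pd.DataFrame(
    {
        "longitude": [0.0, 1.0, 2.0],
        "latitude": [50.0, 50.5, 51.0],
        "altitude": [11000.0, 11000.0, 11000.0],
        "time": pd.date_range("2019-01-01T00", periods=3, freq="1min"),
    }
)
fl = Flight(df, flight_id="example")
assert "crs" not in fl.attrs

# transform_crs now returns a pair of numpy arrays (x, y) in the target CRS
# instead of a new dataset. The target CRS below is only an example.
x, y = fl.transform_crs("EPSG:3857")
assert isinstance(x, np.ndarray) and isinstance(y, np.ndarray)

# The ARCO ERA5 interface is ERA5ARCO, matching ERA5 and ERA5ModelLevel.
era5 = ERA5ARCO(time=("2019-01-01T00", "2019-01-01T01"), variables=("t", "q"))
```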

### Fixes

- Fix the integration time step in `CocipGrid.calc_evolve_one_step`. The previous implementation assumed a time interval of `params["dt_integration"]`, which may not hold for all `source` parameters (for example, when running `CocipGrid` over a collection of ADS-B waypoints).
- Raise an exception when constructing `MetDataset(ds, copy=False)` if `ds["level"]` has float32 dtype. Per interpolation conventions, all coordinate variables must have float64 dtype. (This was previously enforced for the longitude and latitude coordinates but was overlooked for the level coordinate.)
- Allow `AircraftPerformance.ensure_true_airspeed_on_source` to use `eastward_wind` and `northward_wind` fields on the `source` if available. This is useful when met data has already been interpolated onto the `source`.
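
For the new `level` dtype check, a minimal sketch of the failure mode and the explicit cast that avoids it. The dataset below is synthetic and assumes the standard `longitude`/`latitude`/`level`/`time` coordinates expected by `MetDataset`; real data may need additional normalization before `copy=False` is valid.

```python
import numpy as np
import pandas as pd
import xarray as xr

from pycontrails import MetDataset

# Synthetic dataset with the coordinate names pycontrails expects.
ds = xr.Dataset(
    {
        "air_temperature": (
            ("longitude", "latitude", "level", "time"),
            230.0 + np.random.rand(4, 3, 2, 1),
        )
    },
    coords={
        "longitude": np.array([0.0, 1.0, 2.0, 3.0]),  # float64, as required
        "latitude": np.array([40.0, 41.0, 42.0]),  # float64, as required
        "level": np.array([250.0, 300.0], dtype="float32"),  # float32: now rejected
        "time": pd.date_range("2019-01-01", periods=1),
    },
)

# MetDataset(ds, copy=False) now raises because of the float32 level coordinate.
# Cast explicitly before constructing the MetDataset.
ds = ds.assign_coords(level=ds["level"].astype("float64"))
met = MetDataset(ds, copy=False)
```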

## v0.54.0

4 changes: 2 additions & 2 deletions docs/integrations/APCEMM.ipynb
@@ -34,7 +34,7 @@
"\n",
"from pycontrails import Flight, MetVariable\n",
"from pycontrails.core.met_var import Geopotential\n",
"from pycontrails.datalib.ecmwf import ARCOERA5\n",
"from pycontrails.datalib.ecmwf import ERA5ARCO\n",
"from pycontrails.models.apcemm.apcemm import APCEMM\n",
"from pycontrails.models.humidity_scaling import HistogramMatching\n",
"from pycontrails.models.issr import ISSR\n",
@@ -74,7 +74,7 @@
"outputs": [],
"source": [
"variables = (v if isinstance(v, MetVariable) else Geopotential for v in APCEMM.met_variables)\n",
"era5ml = ARCOERA5(time=time_bounds, variables=APCEMM.met_variables, n_jobs=4)"
"era5ml = ERA5ARCO(time=time_bounds, variables=APCEMM.met_variables)"
]
},
{
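
A brief sketch of how the renamed interface in this cell is typically used downstream; the time range is illustrative, and the `open_metdataset` call mirrors the usage shown later in this PR's ARCO-ERA5 notebook diff.

```python
from pycontrails.datalib.ecmwf import ERA5ARCO
from pycontrails.models.apcemm.apcemm import APCEMM

time_bounds = ("2019-01-01T00", "2019-01-01T06")  # illustrative time range

# Same pattern as the notebook cell above, with the renamed interface.
era5ml = ERA5ARCO(time=time_bounds, variables=APCEMM.met_variables)
met = era5ml.open_metdataset()  # opens ARCO ERA5 model-level data for APCEMM
```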
8 changes: 4 additions & 4 deletions docs/notebooks/ARCO-ERA5.ipynb
@@ -6,7 +6,7 @@
"source": [
"# ARCO ERA5\n",
"\n",
"This notebook demonstrates how to load [ARCO ERA5](https://cloud.google.com/storage/docs/public-datasets/era5) data from [Google Cloud Storage](https://cloud.google.com/storage) through the pycontrails `ARCOERA5` interface."
"This notebook demonstrates how to load [ARCO ERA5](https://cloud.google.com/storage/docs/public-datasets/era5) data from [Google Cloud Storage](https://cloud.google.com/storage) through the pycontrails `ERA5ARCO` interface."
]
},
{
@@ -22,7 +22,7 @@
"import xarray as xr\n",
"\n",
"from pycontrails import Flight, MetDataset\n",
"from pycontrails.datalib.ecmwf import ARCOERA5\n",
"from pycontrails.datalib.ecmwf import ERA5ARCO\n",
"from pycontrails.models.cocip import Cocip\n",
"from pycontrails.models.humidity_scaling import ConstantHumidityScaling\n",
"from pycontrails.models.issr import ISSR\n",
@@ -47,15 +47,15 @@
"source": [
"time = (\"2019-01-01T00\", \"2019-01-01T12\")\n",
"\n",
"era5_pl = ARCOERA5(\n",
"era5_pl = ERA5ARCO(\n",
" time=time,\n",
" variables=(*Cocip.met_variables, *Cocip.optional_met_variables),\n",
" cachestore=None,\n",
")\n",
"met = era5_pl.open_metdataset()\n",
"\n",
"\n",
"era5_sl = ARCOERA5(\n",
"era5_sl = ERA5ARCO(\n",
" time=time,\n",
" variables=Cocip.rad_variables,\n",
" pressure_levels=-1,\n",
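
Since the cell above is truncated by the diff, here is a sketch of the full pressure-level plus single-level pattern for Cocip. The `era5_sl` keyword arguments after `pressure_levels=-1`, and the closing lines, are assumptions rather than lines from this notebook.

```python
from pycontrails.datalib.ecmwf import ERA5ARCO
from pycontrails.models.cocip import Cocip

time = ("2019-01-01T00", "2019-01-01T12")

# Pressure-level met data for Cocip, as in the cell above.
era5_pl = ERA5ARCO(
    time=time,
    variables=(*Cocip.met_variables, *Cocip.optional_met_variables),
    cachestore=None,
)
met = era5_pl.open_metdataset()

# Single-level radiation data; pressure_levels=-1 requests single-level fields.
era5_sl = ERA5ARCO(
    time=time,
    variables=Cocip.rad_variables,
    pressure_levels=-1,
    cachestore=None,  # assumption: mirrors the pressure-level call
)
rad = era5_sl.open_metdataset()
```

In a Cocip workflow, `met` and `rad` would then typically be passed to `Cocip(met=met, rad=rad, ...)` together with a humidity scaling model.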
127 changes: 65 additions & 62 deletions docs/notebooks/Flight.ipynb

Large diffs are not rendered by default.

8 changes: 4 additions & 4 deletions docs/notebooks/specific-humidity-interpolation.ipynb
@@ -19,7 +19,7 @@
"source": [
"## ERA5 data\n",
"\n",
"In this notebook, we use the `ARCOERA5` interface to the publicly available [ARCO ERA5](https://cloud.google.com/storage/docs/public-datasets/era5) \"model-level\" data."
"In this notebook, we use the `ERA5ARCO` interface to the publicly available [ARCO ERA5](https://cloud.google.com/storage/docs/public-datasets/era5) \"model-level\" data."
]
},
{
@@ -35,7 +35,7 @@
"from scipy.interpolate import PchipInterpolator\n",
"\n",
"from pycontrails.core import models\n",
"from pycontrails.datalib.ecmwf import ARCOERA5\n",
"from pycontrails.datalib.ecmwf import ERA5ARCO\n",
"from pycontrails.physics import thermo"
]
},
@@ -65,11 +65,11 @@
"time = \"2021-03-14T15\"\n",
"variables = [\"t\", \"q\", \"ciwc\"]\n",
"\n",
"arco_coarse = ARCOERA5(time, variables, pl_coarse)\n",
"arco_coarse = ERA5ARCO(time, variables, pl_coarse)\n",
"ds_coarse = arco_coarse.open_metdataset().data\n",
"ds_coarse.load()\n",
"\n",
"arco_fine = ARCOERA5(time, variables, pl_fine)\n",
"arco_fine = ERA5ARCO(time, variables, pl_fine)\n",
"ds_fine = arco_fine.open_metdataset().data\n",
"ds_fine.load();"
]
12 changes: 9 additions & 3 deletions pycontrails/core/aircraft_performance.py
@@ -12,6 +12,7 @@
from overrides import overrides

from pycontrails.core import flight, fuel
from pycontrails.core.fleet import Fleet
from pycontrails.core.flight import Flight
from pycontrails.core.met import MetDataset
from pycontrails.core.models import Model, ModelParams, interpolate_met
@@ -76,6 +76,10 @@ class AircraftPerformance(Model):

source: Flight

@abc.abstractmethod
@overload
def eval(self, source: Fleet, **params: Any) -> Fleet: ...

@abc.abstractmethod
@overload
def eval(self, source: Flight, **params: Any) -> Flight: ...
@@ -467,10 +472,11 @@ def ensure_true_airspeed_on_source(self) -> npt.NDArray[np.float64]:
tas[cond] = self.source.segment_groundspeed()[cond]
return tas

met_incomplete = (
self.met is None or "eastward_wind" not in self.met or "northward_wind" not in self.met
wind_available = ("eastward_wind" in self.source and "northward_wind" in self.source) or (
self.met is not None and "eastward_wind" in self.met and "northward_wind" in self.met
)
if met_incomplete:

if not wind_available:
if fill_with_groundspeed:
tas = self.source.segment_groundspeed()
self.source["true_airspeed"] = tas
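
A small sketch of what the reworked wind check enables: when `eastward_wind` and `northward_wind` are already present on the `source`, true airspeed can be derived without attaching a met dataset. The flight data is synthetic, and the `segment_true_airspeed` call signature reflects my reading of the existing `Flight` API, not something introduced in this PR.

```python
import numpy as np
import pandas as pd

from pycontrails import Flight

df = pd.DataFrame(
    {
        "longitude": np.linspace(0.0, 2.0, 5),
        "latitude": np.linspace(50.0, 51.0, 5),
        "altitude": np.full(5, 11000.0),
        "time": pd.date_range("2019-01-01T00", periods=5, freq="1min"),
        # Wind components already interpolated onto the source, e.g. by an earlier model.
        "eastward_wind": np.full(5, 10.0),
        "northward_wind": np.full(5, -5.0),
    }
)
fl = Flight(df, flight_id="example")

# ensure_true_airspeed_on_source now uses these source fields when self.met lacks
# wind data; the equivalent direct computation on a Flight looks like this.
tas = fl.segment_true_airspeed(fl["eastward_wind"], fl["northward_wind"])
```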
27 changes: 5 additions & 22 deletions pycontrails/core/fleet.py
@@ -196,17 +196,15 @@ def _maybe_warn(fl: Flight) -> Flight:

fl_attrs: dict[str, Any] = {}

# Pluck from the first flight to get fuel, data_keys, and crs
# Pluck from the first flight to get fuel and data_keys
fuel = seq[0].fuel
data_keys = set(seq[0])  # convert to a new instance because we mutate seq[0]
crs = seq[0].attrs["crs"]

for fl in seq:
_validate_fl(
fl,
fl_attrs=fl_attrs,
data_keys=data_keys,
crs=crs,
fuel=fuel,
broadcast_numeric=broadcast_numeric,
)
@@ -318,10 +316,9 @@ def calc_tas(fl: Flight) -> npt.NDArray[np.float64]:

@overrides
def segment_groundspeed(self, *args: Any, **kwargs: Any) -> npt.NDArray[np.float64]:
# Implement if we have a usecase for this.
# Because the super() method uses a smoothing pattern, it will not reliably
# work on Fleet.
raise NotImplementedError
fls = self.to_flight_list(copy=False)
gs = [fl.segment_groundspeed(*args, **kwargs) for fl in fls]
return np.concatenate(gs)

@overrides
def resample_and_fill(self, *args: Any, **kwargs: Any) -> Fleet:
@@ -336,10 +333,6 @@ def segment_length(self) -> npt.NDArray[np.float64]:
@property
@overrides
def max_distance_gap(self) -> float:
if self.attrs["crs"] != "EPSG:4326":
msg = "Only implemented for EPSG:4326 CRS."
raise NotImplementedError(msg)

return np.nanmax(self.segment_length()).item()

@overrides
@@ -400,7 +393,6 @@ def _validate_fl(
*,
fl_attrs: dict[str, Any],
data_keys: set[str],
crs: str,
fuel: Fuel,
broadcast_numeric: bool,
) -> None:
@@ -419,8 +411,6 @@
Set of data keys expected in each flight.
fuel : Fuel
Fuel used in all flights
crs : str
CRS to use all flights
broadcast_numeric : bool
If True, broadcast numeric attributes to data variables.

@@ -429,7 +419,7 @@
KeyError
``fl`` does not have a ``flight_id`` key in :attr:`attrs`.
ValueError
If ``flight_id`` is duplicated or incompatible CRS found.
If ``flight_id`` is duplicated or if ``fuel`` or ``data_keys`` are inconsistent.
"""
flight_id = _extract_flight_id(fl)

@@ -446,13 +436,6 @@
"The 'fuel' attributes must be consistent between flights in a Fleet."
)
raise ValueError(msg)
if fl.attrs["crs"] != crs:
msg = (
f"CRS on Flight {flight_id} ({fl.attrs['crs']}) "
f"is not inconsistent with previous flights ({crs}). "
"The 'crs' attributes must be consistent between flights in a Fleet."
)
raise ValueError(msg)
if fl.data.keys() != data_keys:
msg = (
f"Data keys on Flight {flight_id} ({fl.data.keys()}) "
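
With the `crs` checks gone and `segment_groundspeed` now implemented by concatenating per-flight results, a `Fleet` built from plain flights supports the same segment queries as a single `Flight`. A minimal sketch follows, assuming `Fleet.from_seq` accepts a list of flights with unique `flight_id` attributes; the waypoint data is synthetic.

```python
import numpy as np
import pandas as pd

from pycontrails import Flight
from pycontrails.core.fleet import Fleet


def _toy_flight(flight_id: str, lon0: float) -> Flight:
    """Build a small synthetic flight for illustration."""
    df = pd.DataFrame(
        {
            "longitude": np.linspace(lon0, lon0 + 2.0, 5),
            "latitude": np.linspace(50.0, 51.0, 5),
            "altitude": np.full(5, 11000.0),
            "time": pd.date_range("2019-01-01T00", periods=5, freq="1min"),
        }
    )
    return Flight(df, flight_id=flight_id)


fleet = Fleet.from_seq([_toy_flight("f1", 0.0), _toy_flight("f2", 10.0)])

# Groundspeeds are computed per flight and concatenated, so the result has one
# entry per waypoint across the whole fleet.
gs = fleet.segment_groundspeed()
assert gs.size == fleet.size

# max_distance_gap no longer checks a "crs" attribute; EPSG:4326 is assumed.
gap = fleet.max_distance_gap
```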