Commit 56eb555
format and bump package version
CDJellen committed Dec 20, 2024
1 parent 8198727 commit 56eb555
Showing 4 changed files with 670 additions and 362 deletions.
51 changes: 32 additions & 19 deletions ndbc_api/ndbc_api.py
@@ -380,11 +380,12 @@ def station(self,
         except (ResponseException, ValueError, KeyError) as e:
             raise ResponseException('Failed to handle returned data.') from e
 
-    def available_realtime(self,
-                           station_id: Union[str, int],
-                           full_response: bool = False,
-                           as_df: Optional[bool] = None,
-                           ) -> Union[List[str], pd.DataFrame, dict]:
+    def available_realtime(
+        self,
+        station_id: Union[str, int],
+        full_response: bool = False,
+        as_df: Optional[bool] = None,
+    ) -> Union[List[str], pd.DataFrame, dict]:
         """Get the available realtime modalities for a station.
 
         While most data buoy (station) measurements are available over
@@ -414,16 +415,20 @@ def available_realtime(self,
         """
         station_id = self._parse_station_id(station_id)
         try:
-            station_realtime = self._stations_api.realtime(handler=self._handler,
-                                                           station_id=station_id)
+            station_realtime = self._stations_api.realtime(
+                handler=self._handler, station_id=station_id)
             full_data = {}
             if full_response:
                 if as_df is None:
                     as_df = False
-                full_data = self._handle_data(station_realtime, as_df, cols=None)
+                full_data = self._handle_data(station_realtime,
+                                              as_df,
+                                              cols=None)
                 return full_data
             else:
-                full_data = self._handle_data(station_realtime, as_df=False, cols=None)
+                full_data = self._handle_data(station_realtime,
+                                              as_df=False,
+                                              cols=None)
 
             # Parse the modes from the full response
             _modes = self.get_modes()
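For orientation, here is a minimal usage sketch of available_realtime as reformatted above. The NdbcApi entry point and the example station id are assumptions for illustration and are not part of this commit.

from ndbc_api import NdbcApi  # assumed package entry point

api = NdbcApi()

# Default call: a list of the realtime modalities available for the (example) station.
modes = api.available_realtime(station_id='41013')

# Full response, parsed into a DataFrame of the available realtime files.
full = api.available_realtime(station_id='41013',
                              full_response=True,
                              as_df=True)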
@@ -611,13 +616,15 @@ def get_data(
                 self.log(
                     level=logging.WARN,
                     station_id=station_id,
-                    message=(f"Failed to process request for station_id "
-                             f"{station_id} with error: {e}"))
+                    message=(
+                        f"Failed to process request for station_id "
+                        f"{station_id} with error: {e}"))
         self.log(logging.INFO, message="Finished processing request.")
         return self._handle_accumulate_data(accumulated_data)
 
-
-    def get_modes(self, use_opendap: bool = False, as_xarray_dataset: Optional[bool] = None) -> List[str]:
+    def get_modes(self,
+                  use_opendap: bool = False,
+                  as_xarray_dataset: Optional[bool] = None) -> List[str]:
         """Get the list of supported modes for `get_data(...)`.
 
         Args:
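As a companion sketch, the reformatted get_modes signature above can be exercised as follows; the get_data keyword names, station id, mode, and dates are assumptions for illustration rather than something this diff shows.

from ndbc_api import NdbcApi  # assumed package entry point

api = NdbcApi()

# HTTP-backed modes by default; use_opendap=True lists the OPeNDAP-backed modes instead.
print(api.get_modes())
print(api.get_modes(use_opendap=True))

# Assumed keyword names, shown only to place get_modes next to get_data.
df = api.get_data(station_id='41013',
                  mode='stdmet',
                  start_time='2024-01-01',
                  end_time='2024-01-31',
                  as_df=True)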
@@ -638,7 +645,8 @@ def get_modes(self, use_opendap: bool = False, as_xarray_dataset: Optional[bool]
         return [v for v in vars(self._data_api) if not v.startswith('_')]
 
     @staticmethod
-    def save_xarray_dataset(dataset: xarray.Dataset, output_filepath: str, **kwargs) -> None:
+    def save_xarray_dataset(dataset: xarray.Dataset, output_filepath: str,
+                            **kwargs) -> None:
         """
         Saves an `xarray.Dataset` to netCDF at a user-specified file path.
 
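A short sketch of the static helper above; the dataset contents and output path are hypothetical, and writing netCDF requires an installed backend such as netCDF4.

import xarray

from ndbc_api import NdbcApi  # assumed package entry point

# Hypothetical dataset standing in for OPeNDAP-backed results.
ds = xarray.Dataset({'WSPD': ('time', [5.2, 6.1, 4.8])})
NdbcApi.save_xarray_dataset(ds, output_filepath='example.nc')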
@@ -732,18 +740,23 @@ def _handle_data(data: pd.DataFrame,
 
     def _handle_accumulate_data(
         self,
-        accumulated_data: Dict[str, List[Union[pd.DataFrame, dict, xarray.Dataset]]],
+        accumulated_data: Dict[str, List[Union[pd.DataFrame, dict,
+                                               xarray.Dataset]]],
     ) -> Union[pd.DataFrame, dict]:
         """Accumulate the data from multiple stations and modes."""
         for k in list(accumulated_data.keys()):
             if not accumulated_data[k]:
                 del accumulated_data[k]
 
         if not accumulated_data:
             return {}
 
-        return_as_df = isinstance(accumulated_data[list(accumulated_data.keys())[-1]][0], pd.DataFrame)
-        use_opendap = isinstance(accumulated_data[list(accumulated_data.keys())[-1]][0], xarray.Dataset)
+        return_as_df = isinstance(
+            accumulated_data[list(accumulated_data.keys())[-1]][0],
+            pd.DataFrame)
+        use_opendap = isinstance(
+            accumulated_data[list(accumulated_data.keys())[-1]][0],
+            xarray.Dataset)
 
         data: Union[List[pd.DataFrame], List[xarray.Dataset],
                     dict] = [] if return_as_df or use_opendap else {}
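To make the pruning and type-detection above easier to follow, here is a standalone sketch with hypothetical inputs showing why the first element under the last remaining key decides the return container.

import pandas as pd

# Hypothetical accumulated results; empty lists are pruned, mirroring the loop above.
accumulated = {
    '41013_stdmet': [pd.DataFrame({'WSPD': [5.2, 6.1]})],
    '41013_cwind': [],  # no data came back for this mode
}
accumulated = {k: v for k, v in accumulated.items() if v}

# Mirror of the isinstance checks: inspect the first element under the last key.
sample = accumulated[list(accumulated.keys())[-1]][0]
return_as_df = isinstance(sample, pd.DataFrame)
print(return_as_df)  # True for this hypothetical input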

0 comments on commit 56eb555
