From 6ed14b741faada9d826e7101005a9f168c370c54 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 Feb 2024 14:35:39 +0000 Subject: [PATCH 01/21] Bump release-drafter/release-drafter from 5 to 6 Bumps [release-drafter/release-drafter](https://github.com/release-drafter/release-drafter) from 5 to 6. - [Release notes](https://github.com/release-drafter/release-drafter/releases) - [Commits](https://github.com/release-drafter/release-drafter/compare/v5...v6) --- updated-dependencies: - dependency-name: release-drafter/release-drafter dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/draft-release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/draft-release.yml b/.github/workflows/draft-release.yml index 3c9d14e..a9504e2 100644 --- a/.github/workflows/draft-release.yml +++ b/.github/workflows/draft-release.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest steps: # Drafts your next Release notes as Pull Requests are merged into "master" - - uses: release-drafter/release-drafter@v5 + - uses: release-drafter/release-drafter@v6 with: config-name: release-drafter-config.yml env: From 2b37cab05103a9e74296b5bd9a6be4d2c6c31f29 Mon Sep 17 00:00:00 2001 From: Jens Keiner Date: Wed, 7 Feb 2024 17:44:36 +0100 Subject: [PATCH 02/21] Accommodate API changes. --- .../core/__init__.py | 254 ++++++++++++------ exchange_calendars_extensions/core/holiday.py | 16 +- .../core/holiday_calendar.py | 58 ++-- poetry.lock | 232 ++++++++-------- pyproject.toml | 9 +- tests/test_api.py | 46 ++-- 6 files changed, 367 insertions(+), 248 deletions(-) diff --git a/exchange_calendars_extensions/core/__init__.py b/exchange_calendars_extensions/core/__init__.py index 431d01a..2cc643f 100644 --- a/exchange_calendars_extensions/core/__init__.py +++ b/exchange_calendars_extensions/core/__init__.py @@ -1,6 +1,5 @@ import functools -import datetime as dt -from typing import Optional, Callable, Type, Union, Any, Dict +from typing import Callable, Type, Union, Dict from exchange_calendars import calendar_utils, register_calendar_type, ExchangeCalendar, get_calendar_names from exchange_calendars.calendar_utils import _default_calendar_factories @@ -28,50 +27,60 @@ from exchange_calendars.exchange_calendar_xtse import XTSEExchangeCalendar from exchange_calendars.exchange_calendar_xwar import XWARExchangeCalendar from exchange_calendars.exchange_calendar_xwbo import XWBOExchangeCalendar -from pydantic import validate_call +from pydantic import validate_call, BaseModel, conint from typing_extensions import ParamSpec, Concatenate -from exchange_calendars_extensions.api.changes import ChangeSet, ChangeSetDict, DayType, DaySpec, DaySpecWithTime, TimestampLike -from exchange_calendars_extensions.core.holiday_calendar import extend_class, ExtendedExchangeCalendar, ExchangeCalendarExtensions +from exchange_calendars_extensions.api.changes import (ChangeSet, ChangeSetDict, DayType, TimestampLike, DayPropsLike, + Tags, TimeLike) +from exchange_calendars_extensions.core.holiday_calendar import (extend_class, ExtendedExchangeCalendar, + ExchangeCalendarExtensions) # Dictionary that maps from exchange key to ExchangeCalendarChangeSet. Contains all changesets to apply when creating a -# new calendar instance. +# new calendar instance. This dictionary should only ever contain non-empty changesets. 
If a changeset becomes empty, +# the corresponding entry should just be removed. _changesets: Dict[str, ChangeSet] = dict() -# Dictionary that maps from exchange key to ExtendedExchangeCalendar. Contains all extended calendars classes that -# replace the vanilla classes in exchange_calendars when calling apply_extensions(). -# -# Note: The values in this dictionary use extend_class() to create the extended classes, respectively for each exchange, -# based on the respective vanilla class in exchange_calendars. Also, the changeset_provider is set to a lambda -# function that returns the changeset for the respective exchange in _changesets, or None, if no changeset exists. -_extensions = { - "ASEX": (ASEXExchangeCalendar, 4), - "XAMS": (XAMSExchangeCalendar, 4), - "XBRU": (XBRUExchangeCalendar, 4), - "XBUD": (XBUDExchangeCalendar, 4), - "XCSE": (XCSEExchangeCalendar, 4), - "XDUB": (XDUBExchangeCalendar, 4), - "XETR": (XETRExchangeCalendar, 4), - "XHEL": (XHELExchangeCalendar, 4), - "XIST": (XISTExchangeCalendar, 4), - "XJSE": (XJSEExchangeCalendar, 3), - "XLIS": (XLISExchangeCalendar, 4), - "XLON": (XLONExchangeCalendar, 4), - "XMAD": (XMADExchangeCalendar, 4), - "XMIL": (XMILExchangeCalendar, 4), - "XNYS": (XNYSExchangeCalendar, 4), - "XOSL": (XOSLExchangeCalendar, 4), - "XPAR": (XPARExchangeCalendar, 4), - "XPRA": (XPRAExchangeCalendar, 4), - "XSTO": (XSTOExchangeCalendar, 4), - "XSWX": (XSWXExchangeCalendar, 4), - "XTAE": (XTAEExchangeCalendar, 4), - "XTSE": (XTSEExchangeCalendar, 4), - "XWAR": (XWARExchangeCalendar, 4), - "XWBO": (XWBOExchangeCalendar, 4), + +class ExtensionSpec(BaseModel, arbitrary_types_allowed=True): + """Specifies how to derive an extended calendar class from a vanilla calendar class.""" + + # The base class to extend. + base: Type[ExchangeCalendar] + + # The day of the week on which options expire. If None, expiry days are not supported. + day_of_week_expiry: Union[conint(ge=0,le=6), None] = None + + +# Internal dictionary that specifies how to derive extended calendars for specific exchanges. 
+_extensions: Dict[str, ExtensionSpec] = { + "ASEX": ExtensionSpec(base=ASEXExchangeCalendar, day_of_week_expiry=4), + "XAMS": ExtensionSpec(base=XAMSExchangeCalendar, day_of_week_expiry=4), + "XBRU": ExtensionSpec(base=XBRUExchangeCalendar, day_of_week_expiry=4), + "XBUD": ExtensionSpec(base=XBUDExchangeCalendar, day_of_week_expiry=4), + "XCSE": ExtensionSpec(base=XCSEExchangeCalendar, day_of_week_expiry=4), + "XDUB": ExtensionSpec(base=XDUBExchangeCalendar, day_of_week_expiry=4), + "XETR": ExtensionSpec(base=XETRExchangeCalendar, day_of_week_expiry=4), + "XHEL": ExtensionSpec(base=XHELExchangeCalendar, day_of_week_expiry=4), + "XIST": ExtensionSpec(base=XISTExchangeCalendar, day_of_week_expiry=4), + "XJSE": ExtensionSpec(base=XJSEExchangeCalendar, day_of_week_expiry=3), + "XLIS": ExtensionSpec(base=XLISExchangeCalendar, day_of_week_expiry=4), + "XLON": ExtensionSpec(base=XLONExchangeCalendar, day_of_week_expiry=4), + "XMAD": ExtensionSpec(base=XMADExchangeCalendar, day_of_week_expiry=4), + "XMIL": ExtensionSpec(base=XMILExchangeCalendar, day_of_week_expiry=4), + "XNYS": ExtensionSpec(base=XNYSExchangeCalendar, day_of_week_expiry=4), + "XOSL": ExtensionSpec(base=XOSLExchangeCalendar, day_of_week_expiry=4), + "XPAR": ExtensionSpec(base=XPARExchangeCalendar, day_of_week_expiry=4), + "XPRA": ExtensionSpec(base=XPRAExchangeCalendar, day_of_week_expiry=4), + "XSTO": ExtensionSpec(base=XSTOExchangeCalendar, day_of_week_expiry=4), + "XSWX": ExtensionSpec(base=XSWXExchangeCalendar, day_of_week_expiry=4), + "XTAE": ExtensionSpec(base=XTAEExchangeCalendar, day_of_week_expiry=4), + "XTSE": ExtensionSpec(base=XTSEExchangeCalendar, day_of_week_expiry=4), + "XWAR": ExtensionSpec(base=XWARExchangeCalendar, day_of_week_expiry=4), + "XWBO": ExtensionSpec(base=XWBOExchangeCalendar, day_of_week_expiry=4), } +# Internal dictionary containing the original calendar classes. _original_classes = dict() @@ -79,39 +88,67 @@ def apply_extensions() -> None: """ Apply extensions to exchange_calendars. - This registers all extended calendars in exchange_calendars, overwriting the respective vanilla calendars. + This registers all extended calendars in exchange_calendars, replacing the respective vanilla calendars. + + This function is idempotent. If extensions have already been applied, this function does nothing. """ if len(_original_classes) > 0: # Extensions have already been applied. return + # Get all calendar names, including aliases. calendar_names = set(get_calendar_names()) def get_changeset_fn(name: str) -> Callable[[], ChangeSet]: + """Returns a function that returns the changeset for the given exchange key. + + Parameters + ---------- + name : str + The exchange key for which to return the changeset. + + Returns + ------- + Callable[[], ChangeSet] + The function that returns the changeset. + """ def fn() -> ChangeSet: return _changesets.get(name) + return fn + # Create and register extended calendar classes for all calendars for which no explicit rules have been defined. for k in calendar_names - set(_extensions.keys()): + # Get the original class. cls = _default_calendar_factories.get(k) + if cls is not None: # Store the original class for later use. _original_classes[k] = cls - # Create extended class. + + # Create extended class without support for expiry days. cls = extend_class(cls, day_of_week_expiry=None, changeset_provider=get_changeset_fn(k)) + # Register extended class. register_calendar_type(k, cls, force=True) + # Remove original class from factory cache. 
_remove_calendar_from_factory_cache(k) + # Create and register extended calendar classes for all calendars for which explicit rules have been defined. for k, v in _extensions.items(): - cls, day_of_week_expiry = v + # Get the original class and the day of the week for expiry days. + cls, day_of_week_expiry = v.base, v.day_of_week_expiry + # Store the original class for later use. _original_classes[k] = cls - # Create extended class. + + # Create extended class with support for expiry days. cls = extend_class(cls, day_of_week_expiry=day_of_week_expiry, changeset_provider=get_changeset_fn(k)) + # Register extended class. register_calendar_type(k, cls, force=True) + # Remove original class from factory cache. _remove_calendar_from_factory_cache(k) @@ -129,13 +166,15 @@ def remove_extensions() -> None: for k, v in _original_classes.items(): # Register original class. register_calendar_type(k, v, force=True) + # Remove extended class from factory cache. _remove_calendar_from_factory_cache(k) + # Clear original classes. _original_classes.clear() -def register_extension(name: str, cls: Type[ExchangeCalendar], day_of_week_expiry: Optional[int] = None) -> None: +def register_extension(name: str, cls: Type[ExchangeCalendar], day_of_week_expiry: Union[int, None] = None) -> None: """ Register an extended calendar class for a given exchange key and a given base class. @@ -156,7 +195,7 @@ def register_extension(name: str, cls: Type[ExchangeCalendar], day_of_week_expir ------- None """ - _extensions[name] = (cls, day_of_week_expiry) + _extensions[name] = ExtensionSpec(base=cls, day_of_week_expiry=day_of_week_expiry) def _remove_calendar_from_factory_cache(name: str): @@ -221,7 +260,7 @@ def wrapper(exchange: str, *args: P.args, **kwargs: P.kwargs) -> None: @_with_changeset -def _add_day(cs: ChangeSet, spec: Union[DaySpec, DaySpecWithTime, dict]) -> ChangeSet: +def _add_day(cs: ChangeSet, date: TimestampLike, props: DayPropsLike) -> ChangeSet: """ Add a day of a given type to the changeset for a given exchange calendar. @@ -229,8 +268,10 @@ def _add_day(cs: ChangeSet, spec: Union[DaySpec, DaySpecWithTime, dict]) -> Chan ---------- cs : ChangeSet The changeset to which to add the day. - spec : Union[DaySpec, DaySpecWithTime, dict] - The properties to add for the day. Must match the properties required by the given day type. + date : TimestampLike + The date to add. Must be convertible to pandas.Timestamp. + props : DayPropsLike + The properties of the day to add. Returns ------- @@ -242,11 +283,11 @@ def _add_day(cs: ChangeSet, spec: Union[DaySpec, DaySpecWithTime, dict]) -> Chan ValueError If the changeset would be inconsistent after adding the day. """ - return cs.add_day(spec) + return cs.add_day(date, props) -@validate_call -def add_day(exchange: str, spec: Union[DaySpec, DaySpecWithTime, dict]) -> None: +@validate_call(config={'arbitrary_types_allowed': True}) +def add_day(exchange: str, date: TimestampLike, props: DayPropsLike) -> None: """ Add a day of a given type to the given exchange calendar. @@ -254,7 +295,9 @@ def add_day(exchange: str, spec: Union[DaySpec, DaySpecWithTime, dict]) -> None: ---------- exchange : str The exchange key for which to add the day. - spec : Union[DaySpec, DaySpecWithTime, dict] + date : TimestampLike + The date to add. Must be convertible to pandas.Timestamp. + props : Union[DaySpec, DaySpecWithTime, dict] The properties to add for the day. Must match the properties required by the given day type. 
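        For example, a plain dict such as {'type': 'holiday', 'name': 'Some Holiday'} or, for a special
        session, {'type': 'special_open', 'name': 'Some Special Open', 'time': '11:00'} would be a valid
        value (names and times here are purely illustrative).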
Returns @@ -266,7 +309,7 @@ def add_day(exchange: str, spec: Union[DaySpec, DaySpecWithTime, dict]) -> None: ValidationError If strict is True and the changeset for the exchange would be inconsistent after adding the day. """ - _add_day(exchange, spec) + _add_day(exchange, date, props) @_with_changeset @@ -294,6 +337,7 @@ def _remove_day(cs: ChangeSet, date: TimestampLike) -> ChangeSet: return cs.remove_day(date) +@validate_call(config={'arbitrary_types_allowed': True}) def remove_day(exchange: str, date: TimestampLike) -> None: """ Remove a day of a given type from the given exchange calendar. @@ -318,7 +362,60 @@ def remove_day(exchange: str, date: TimestampLike) -> None: @_with_changeset -def _reset_day(cs: ChangeSet, date: TimestampLike) -> ChangeSet: +def _set_tags(cs: ChangeSet, date: TimestampLike, tags: Tags) -> ChangeSet: + """ + Set tags for a given day in the given exchange calendar. + + Parameters + ---------- + cs : ChangeSet + The changeset where to set the tags. + date : TimestampLike + The date for which to set the tags. + tags : Tags + The tags to set. + + Returns + ------- + ChangeSet + The changeset with the given tags set for the given day. + + Raises + ------ + ValidationError + If strict is True and the changeset for the exchange would be inconsistent after removing the day. + """ + return cs.remove_day(date) + + +@validate_call(config={'arbitrary_types_allowed': True}) +def set_tags(exchange: str, date: TimestampLike, tags: Tags) -> None: + """ + Set tags for a given day in the given exchange calendar. + + Parameters + ---------- + exchange : str + The exchange for which to set the tags. + date : TimestampLike + The date for which to set the tags. + tags : Tags + The tags to set. + + Returns + ------- + None + + Raises + ------ + ValidationError + If strict is True and the changeset for the exchange would be inconsistent after removing the day. + """ + _set_tags(exchange, date, tags) + + +@_with_changeset +def _reset_day(cs: ChangeSet, date: TimestampLike, include_tags: bool) -> ChangeSet: """ Clear a day of a given type from the changeset for a given exchange calendar. @@ -328,16 +425,19 @@ def _reset_day(cs: ChangeSet, date: TimestampLike) -> ChangeSet: The changeset from which to clear the day. date : TimestampLike The date to clear. Must be convertible to pandas.Timestamp. + include_tags : bool + Whether to also clear the tags for the day. Returns ------- ChangeSet The changeset with the cleared day. """ - return cs.clear_day(date) + return cs.clear_day(date, include_meta=include_tags) -def reset_day(exchange: str, date: TimestampLike) -> None: +@validate_call(config={'arbitrary_types_allowed': True}) +def reset_day(exchange: str, date: TimestampLike, include_tags: bool = False) -> None: """ Clear a day of a given type from the given exchange calendar. @@ -347,12 +447,14 @@ def reset_day(exchange: str, date: TimestampLike) -> None: The exchange key for which to clear the day. date : TimestampLike The date to clear. Must be convertible to pandas.Timestamp. + include_tags : bool + Whether to also clear the tags for the day. Defaults to False. Returns ------- None """ - _reset_day(exchange, date) + _reset_day(exchange, date, include_tags=include_tags) def add_holiday(exchange: str, date: TimestampLike, name: str = "Holiday") -> None: @@ -377,10 +479,10 @@ def add_holiday(exchange: str, date: TimestampLike, name: str = "Holiday") -> No ValidationError If strict is True and the changeset for the exchange would be inconsistent after adding the day. 
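    Examples
    --------
    A minimal sketch, assuming the exchange key refers to a registered calendar; the date and name are
    illustrative only:

    >>> add_holiday("XLON", "2022-01-10", name="Holiday")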
""" - _add_day(exchange, {'date': date, 'type': DayType.HOLIDAY, 'name': name}) + _add_day(exchange, date, {'type': DayType.HOLIDAY, 'name': name}) -def add_special_open(exchange: str, date: TimestampLike, time: Union[dt.time, str], name: str = "Special Open") -> None: +def add_special_open(exchange: str, date: TimestampLike, time: TimeLike, name: str = "Special Open") -> None: """ Add a special open to an exchange calendar. @@ -390,7 +492,7 @@ def add_special_open(exchange: str, date: TimestampLike, time: Union[dt.time, st The exchange key for which to add the day. date : TimestampLike The date to add. Must be convertible to pandas.Timestamp. - time : Union[time, str] + time : TimeLike The time of the special open. If a string, must be in the format 'HH:MM' or 'HH:MM:SS'. name : str The name of the special open. @@ -404,10 +506,10 @@ def add_special_open(exchange: str, date: TimestampLike, time: Union[dt.time, st ValidationError If strict is True and the changeset for the exchange would be inconsistent after adding the day. """ - _add_day(exchange, {'date': date, 'type': DayType.SPECIAL_OPEN, 'name': name, 'time': time}) + _add_day(exchange, date, {'type': DayType.SPECIAL_OPEN, 'name': name, 'time': time}) -def add_special_close(exchange: str, date: TimestampLike, time: Union[dt.time, str], name: str = "Special Close") -> None: +def add_special_close(exchange: str, date: TimestampLike, time: TimeLike, name: str = "Special Close") -> None: """ Add a special close to an exchange calendar. @@ -417,7 +519,7 @@ def add_special_close(exchange: str, date: TimestampLike, time: Union[dt.time, s The exchange key for which to add the day. date : TimestampLike The date to add. Must be convertible to pandas.Timestamp. - time : Union[time, str] + time : TimeLike The time of the special close. If a string, must be in the format 'HH:MM' or 'HH:MM:SS'. name : str The name of the special close. @@ -431,7 +533,7 @@ def add_special_close(exchange: str, date: TimestampLike, time: Union[dt.time, s ValidationError If strict is True and the changeset for the exchange would be inconsistent after adding the day. """ - _add_day(exchange, {'date': date, 'type': DayType.SPECIAL_CLOSE, 'name': name, 'time': time}) + _add_day(exchange, date, {'type': DayType.SPECIAL_CLOSE, 'name': name, 'time': time}) def add_quarterly_expiry(exchange: str, date: TimestampLike, name: str = "Quarterly Expiry") -> None: @@ -456,10 +558,10 @@ def add_quarterly_expiry(exchange: str, date: TimestampLike, name: str = "Quarte ValidationError If strict is True and the changeset for the exchange would be inconsistent after adding the day. """ - _add_day(exchange, {'date': date, 'type': DayType.QUARTERLY_EXPIRY, 'name': name}) + _add_day(exchange, date, {'type': DayType.QUARTERLY_EXPIRY, 'name': name}) -def add_monthly_expiry(exchange: str, date: Any, name: str = "Monthly Expiry") -> None: +def add_monthly_expiry(exchange: str, date: TimestampLike, name: str = "Monthly Expiry") -> None: """ Add a monthly expiry to an exchange calendar. @@ -481,11 +583,11 @@ def add_monthly_expiry(exchange: str, date: Any, name: str = "Monthly Expiry") - ValidationError If strict is True and the changeset for the exchange would be inconsistent after adding the day. 
""" - _add_day(exchange, {'date': date, 'type': DayType.MONTHLY_EXPIRY, 'name': name}) + _add_day(exchange, date, {'type': DayType.MONTHLY_EXPIRY, 'name': name}) @_with_changeset -def _reset_calendar(cs: ChangeSet) -> ChangeSet: +def _reset_calendar(cs: ChangeSet, include_tags: bool) -> ChangeSet: """ Reset an exchange calendar to its original state. @@ -499,7 +601,7 @@ def _reset_calendar(cs: ChangeSet) -> ChangeSet: ChangeSet The reset changeset. """ - return cs.clear() + return cs.clear(include_meta=include_tags) def reset_calendar(exchange: str) -> None: @@ -515,7 +617,7 @@ def reset_calendar(exchange: str) -> None: ------- None """ - _reset_calendar(exchange) + _reset_calendar(exchange, include_tags=True) def reset_all_calendars() -> None: @@ -544,7 +646,7 @@ def update_calendar(exchange: str, changes: Union[ChangeSet, dict]) -> None: ---------- exchange : str The exchange key for which to apply the changes. - changes : dict + changes : ChangeSet The changes to apply. Returns @@ -554,7 +656,7 @@ def update_calendar(exchange: str, changes: Union[ChangeSet, dict]) -> None: _update_calendar(exchange, changes) -def get_changes_for_calendar(exchange: str) -> ChangeSet: +def get_changes_for_calendar(exchange: str) -> Union[ChangeSet, None]: """ Get the changes for an exchange calendar. @@ -566,9 +668,9 @@ def get_changes_for_calendar(exchange: str) -> ChangeSet: Returns ------- ChangeSet - The changes for the exchange. + The changeset for the given exchange, or None, if no changes have been registered. """ - cs: Optional[ChangeSet] = _changesets.get(exchange, None) + cs: Union[ChangeSet, None] = _changesets.get(exchange, None) if cs is not None: cs = cs.model_copy(deep=True) @@ -590,10 +692,10 @@ def get_changes_for_all_calendars() -> ChangeSetDict: # Declare public names. __all__ = ["apply_extensions", "remove_extensions", "register_extension", "extend_class", "DayType", "add_day", - "remove_day", "reset_day", "DaySpec", "DaySpecWithTime", "add_holiday", "add_special_close", - "add_special_open", "add_quarterly_expiry", "add_monthly_expiry", "reset_calendar", "reset_all_calendars", - "update_calendar", "get_changes_for_calendar", "get_changes_for_all_calendars", "ChangeSet", - "ExtendedExchangeCalendar", "ExchangeCalendarExtensions"] + "remove_day", "reset_day", "DayPropsLike", "add_holiday", "add_special_close", "add_special_open", + "add_quarterly_expiry", "add_monthly_expiry", "reset_calendar", "reset_all_calendars", "update_calendar", + "get_changes_for_calendar", "get_changes_for_all_calendars", "ChangeSet", "ExtendedExchangeCalendar", + "ExchangeCalendarExtensions"] __version__ = None diff --git a/exchange_calendars_extensions/core/holiday.py b/exchange_calendars_extensions/core/holiday.py index b1294c8..337e405 100644 --- a/exchange_calendars_extensions/core/holiday.py +++ b/exchange_calendars_extensions/core/holiday.py @@ -13,10 +13,10 @@ def get_monthly_expiry_holiday( name: str, day_of_week: int, month: int, - observance: Optional[Callable[[pd.Timestamp], pd.Timestamp]] = None, - start_date: Optional[pd.Timestamp] = None, - end_date: Optional[pd.Timestamp] = None, - tz: Optional[tzinfo] = None) -> Holiday: + observance: Union[Callable[[pd.Timestamp], pd.Timestamp], None] = None, + start_date: Union[pd.Timestamp, None] = None, + end_date: Union[pd.Timestamp, None] = None, + tz: Union[tzinfo, None] = None) -> Holiday: """ Return a holiday that occurs yearly on the third given day of the week in the given month of the year. 
@@ -53,10 +53,10 @@ def get_monthly_expiry_holiday( def get_last_day_of_month_holiday( name: str, month: int, - observance: Optional[Callable[[pd.Timestamp], pd.Timestamp]] = None, - start_date: Optional[pd.Timestamp] = None, - end_date: Optional[pd.Timestamp] = None, - tz: Optional[tzinfo] = None) -> Holiday: + observance: Union[Callable[[pd.Timestamp], pd.Timestamp], None] = None, + start_date: Union[pd.Timestamp, None] = None, + end_date: Union[pd.Timestamp, None] = None, + tz: Union[tzinfo, None] = None) -> Holiday: """ Return a holiday that occurs yearly on the last day of the given month of the year. diff --git a/exchange_calendars_extensions/core/holiday_calendar.py b/exchange_calendars_extensions/core/holiday_calendar.py index 238e62a..6ae1190 100644 --- a/exchange_calendars_extensions/core/holiday_calendar.py +++ b/exchange_calendars_extensions/core/holiday_calendar.py @@ -474,6 +474,10 @@ def last_regular_trading_days_of_months(self) -> Union[ExchangeCalendarsHolidayC """ ... + @property + def tags(self): + ... + @dataclass class AdjustedProperties: @@ -519,8 +523,8 @@ class ExtendedExchangeCalendar(ExchangeCalendar, ExchangeCalendarExtensions, ABC ... -def extend_class(cls: Type[ExchangeCalendar], day_of_week_expiry: Optional[int] = None, - changeset_provider: Callable[[], ChangeSet] = None) -> type: +def extend_class(cls: Type[ExchangeCalendar], day_of_week_expiry: Union[int, None] = None, + changeset_provider: Union[Callable[[], ChangeSet], None] = None) -> type: """ Extend the given ExchangeCalendar class with additional properties. @@ -528,9 +532,9 @@ def extend_class(cls: Type[ExchangeCalendar], day_of_week_expiry: Optional[int] ---------- cls : Type[ExchangeCalendar] The input class to extend. - day_of_week_expiry : int, optional + day_of_week_expiry : Union[int, None] The day of the week when expiry days are observed, where 0 is Monday and 6 is Sunday. Defaults to 4 (Friday). - changeset_provider : Callable[[], ExchangeCalendarChangeSet], optional + changeset_provider : Union[Callable[[], ChangeSet], None] The optional function that returns a changeset to apply to the calendar. Returns @@ -661,7 +665,7 @@ def remove_day_from_rules(ts: pd.Timestamp, rules: List[Holiday]) -> List[Holida # Modify rules to exclude ts. for rule in remove: # Create copies of rule with end date set to ts - 1 day and ts + 1 day, respectively. - rule_before_ts = clone_holiday(rule, end_date=ts - pd.Timedelta(days=1)) + rule_before_ts = clone_holiday(rule, end_date=ts + pd.Timedelta(days=-1)) rule_after_ts = clone_holiday(rule, start_date=ts + pd.Timedelta(days=1)) # Determine index of rule in list. rule_index = rules.index(rule) @@ -792,7 +796,7 @@ def __init__(self, *args, **kwargs): adhoc_special_opens=list(copy(adhoc_special_opens_orig(self)))) # Get changeset from provider, maybe. - changeset: ChangeSet = changeset_provider() if changeset_provider is not None else None + changeset: Union[ChangeSet, None] = changeset_provider() if changeset_provider is not None else None # Set changeset to None if it is empty. if changeset is not None and len(changeset) <= 0: @@ -800,7 +804,7 @@ def __init__(self, *args, **kwargs): if changeset is not None: # Remove all changed days from holidays, special opens, and special closes. 
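            # (Days touched by the changeset are stripped from the original rules first, so that the
            # additions further below replace, rather than duplicate, any pre-existing sessions on the
            # same dates.)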
- for ts in changeset.all_days: + for ts in changeset.all_days(include_meta=False): a.regular_holidays, a.adhoc_holidays = remove_holiday(ts, a.regular_holidays, a.adhoc_holidays) a.special_opens, a.adhoc_special_opens = remove_special_session(ts, a.special_opens, a.adhoc_special_opens) @@ -808,17 +812,17 @@ def __init__(self, *args, **kwargs): a.adhoc_special_closes) # Add holiday, special opens, and special closes. - for spec in changeset.add: - if spec.type == DayType.HOLIDAY: + for date, props in changeset.add.items(): + if props.type == DayType.HOLIDAY: # Add the holiday. - a.regular_holidays.append(Holiday(spec.name, year=spec.date.year, month=spec.date.month, - day=spec.date.day)) - elif spec.type == DayType.SPECIAL_OPEN: + a.regular_holidays.append(Holiday(props.name, year=date.year, month=date.month, + day=date.day)) + elif props.type == DayType.SPECIAL_OPEN: # Add the special open. - a.special_opens = add_special_session(spec.name, spec.date, spec.time, a.special_opens) - elif spec.type == DayType.SPECIAL_CLOSE: + a.special_opens = add_special_session(props.name, date, props.time, a.special_opens) + elif props.type == DayType.SPECIAL_CLOSE: # Add the special close. - a.special_closes = add_special_session(spec.name, spec.date, spec.time, a.special_closes) + a.special_closes = add_special_session(props.name, date, props.time, a.special_closes) self._adjusted_properties = a @@ -832,20 +836,20 @@ def __init__(self, *args, **kwargs): if changeset is not None: # Remove all changed days from monthly and quarterly expiries. - for ts in changeset.all_days: + for ts in changeset.all_days(include_meta=False): a.monthly_expiries, _ = remove_holiday(ts, a.monthly_expiries) a.quarterly_expiries, _ = remove_holiday(ts, a.quarterly_expiries) # Add monthly and quarterly expiries. - for spec in changeset.add: - if spec.type == DayType.MONTHLY_EXPIRY: + for date, props in changeset.add.items(): + if props.type == DayType.MONTHLY_EXPIRY: # Add the monthly expiry. - a.monthly_expiries.append(Holiday(spec.name, year=spec.date.year, month=spec.date.month, - day=spec.date.day)) - elif spec.type == DayType.QUARTERLY_EXPIRY: + a.monthly_expiries.append(Holiday(props.name, year=date.year, month=date.month, + day=date.day)) + elif props.type == DayType.QUARTERLY_EXPIRY: # Add the quarterly expiry. - a.quarterly_expiries.append(Holiday(spec.name, year=spec.date.year, month=spec.date.month, - day=spec.date.day)) + a.quarterly_expiries.append(Holiday(props.name, year=date.year, month=date.month, + day=date.day)) # Set up last trading days of the month. 
a.last_trading_days_of_months = get_last_day_of_month_rules('last trading day of month') @@ -903,25 +907,25 @@ def special_closes_all(self) -> Union[HolidayCalendar, None]: return get_special_closes_calendar(self) @property - def monthly_expiries(self) -> Union[HolidayCalendar, None]: + def monthly_expiries(self) -> Union[ExchangeCalendarsHolidayCalendar, None]: return AdjustedHolidayCalendar(rules=self._adjusted_properties.monthly_expiries, other=self._holidays_and_special_business_days_shared, weekmask=self.weekmask, roll_fn=roll_one_day_same_month) @property - def quarterly_expiries(self) -> Union[HolidayCalendar, None]: + def quarterly_expiries(self) -> Union[ExchangeCalendarsHolidayCalendar, None]: return AdjustedHolidayCalendar(rules=self._adjusted_properties.quarterly_expiries, other=self._holidays_and_special_business_days_shared, weekmask=self.weekmask, roll_fn=roll_one_day_same_month) @property - def last_trading_days_of_months(self) -> Union[HolidayCalendar, None]: + def last_trading_days_of_months(self) -> Union[ExchangeCalendarsHolidayCalendar, None]: return AdjustedHolidayCalendar(rules=self._adjusted_properties.last_trading_days_of_months, other=self._holidays_shared, weekmask=self.weekmask, roll_fn=roll_one_day_same_month) @property - def last_regular_trading_days_of_months(self) -> Union[HolidayCalendar, None]: + def last_regular_trading_days_of_months(self) -> Union[ExchangeCalendarsHolidayCalendar, None]: return AdjustedHolidayCalendar(rules=self._adjusted_properties.last_regular_trading_days_of_months, other=self._holidays_and_special_business_days_shared, weekmask=self.weekmask, roll_fn=roll_one_day_same_month) diff --git a/poetry.lock b/poetry.lock index aedb27d..7892be6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -11,9 +11,6 @@ files = [ {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} - [[package]] name = "cfgv" version = "3.4.0" @@ -130,43 +127,44 @@ test = ["pytest (>=6)"] [[package]] name = "exchange-calendars" -version = "4.2.8" +version = "4.5.2" description = "Calendars for securities exchanges" optional = false -python-versions = "~=3.8" +python-versions = "~=3.9" files = [ - {file = "exchange_calendars-4.2.8-py3-none-any.whl", hash = "sha256:3695afd0608c6507ce3016dfcb68a1698220016a049b45d42b4dfa9ecf85a15c"}, - {file = "exchange_calendars-4.2.8.tar.gz", hash = "sha256:1598b6219a58e7be218c640f389375e39c9c12513c7db82d7591ae56f64467f9"}, + {file = "exchange_calendars-4.5.2-py3-none-any.whl", hash = "sha256:0ff762fdebd9d178870b52cae231f630a2be45f9f089801fb17b5953e5ce9d2d"}, + {file = "exchange_calendars-4.5.2.tar.gz", hash = "sha256:6f80512de85b5750caf18f2e17e40d68acde0500e20d952d445377bb367c7209"}, ] [package.dependencies] korean-lunar-calendar = "*" -numpy = "*" -pandas = ">=1.1" +numpy = "<2" +pandas = ">=1.5" pyluach = "*" -python-dateutil = "*" -pytz = "*" toolz = "*" +tzdata = "*" [package.extras] dev = ["flake8", "hypothesis", "pip-tools", "pytest", "pytest-benchmark", "pytest-xdist"] [[package]] name = "exchange-calendars-extensions-api" -version = "0.2.0" +version = "0" description = "A package that defines parts of the API of the exchange-calendars-extensions package." optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "exchange_calendars_extensions_api-0.2.0-py3-none-any.whl", hash = "sha256:acaed8a1bc76fefc5ba29ed53b6f965230bb15b0d7677f361e8252028305a413"}, - {file = "exchange_calendars_extensions_api-0.2.0.tar.gz", hash = "sha256:e8de5a84e9d5821f1b8d74584eea931354b086089cde52df78e03fe9fd8c3b2f"}, -] +python-versions = "~=3.9" +files = [] +develop = false [package.dependencies] -pandas = ">=1" +pandas = "^2" pydantic = ">=2,<3" typing-extensions = ">=4.0,<5" +[package.source] +type = "directory" +url = "../exchange_calendars_extensions_api" + [[package]] name = "filelock" version = "3.13.1" @@ -235,39 +233,47 @@ setuptools = "*" [[package]] name = "numpy" -version = "1.24.4" +version = "1.26.3" description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.8" -files = [ - {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, - {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, - {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, - {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, - {file = 
"numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, - {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, - {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, - {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, - {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, - {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, - {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, - {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, + {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, + {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, + {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, + {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, + {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, + {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, + {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, + {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, + {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, ] [[package]] @@ -283,70 +289,75 @@ files = [ [[package]] name = "pandas" -version = "2.0.3" +version = "2.2.0" description = "Powerful data structures for data analysis, time series, and statistics" optional = false -python-versions = ">=3.8" -files = [ - {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, - {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, - {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, - {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, - {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, - {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, - 
{file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, - {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, - {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, - {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, - {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, - {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, + {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, + {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, + {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, + {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, + {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, + {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, + {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, + {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, ] [package.dependencies] numpy = [ - {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" -tzdata = ">=2022.1" +tzdata = ">=2022.7" [package.extras] -all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", 
"psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] -aws = ["s3fs (>=2021.08.0)"] -clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] -compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] -computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2021.07.0)"] -gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] -hdf5 = ["tables (>=3.6.1)"] -html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] -mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] -spss = ["pyreadstat (>=1.1.2)"] -sql-other = ["SQLAlchemy (>=1.4.16)"] -test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.6.3)"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist 
(>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] [[package]] name = "platformdirs" @@ -615,7 +626,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -623,16 +633,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -649,7 +651,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -657,7 +658,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -756,5 +756,5 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" -python-versions = "~=3.8" -content-hash = "a84dff8313a9632b8ccfa8bb764df74812fb0aca892e8b3c0919aadbc6cef26d" +python-versions = "~=3.9" +content-hash = "9f5825d3711cdec1fb8bcbcedcffedee510777c24d9e9ebd349e919a3310d16a" diff --git a/pyproject.toml b/pyproject.toml index 3200c32..864ed80 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,6 @@ classifiers = [ "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -33,8 +32,8 @@ classifiers = [ packages = [{include = "exchange_calendars_extensions"}] [tool.poetry.dependencies] -python = "~=3.8" -exchange-calendars-extensions-api = ">=0.2.0,<1.0.0" +python = "~=3.9" +exchange-calendars-extensions-api = {path="../exchange_calendars_extensions_api/", develop=false} # ">=0.2.0,<1.0.0" exchange-calendars = ">=4.0.1,<5" typing-extensions = ">=4.0,<5" pydantic = ">=2.0,<3" @@ -88,8 +87,8 @@ line-length = 88 # Allow unused variables when underscore-prefixed. dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" -# Assume Python 3.10. 
-target-version = "py38" +# Assume Python 3.9. +target-version = "py39" [tool.ruff.mccabe] # Unlike Flake8, default to a complexity level of 10. diff --git a/tests/test_api.py b/tests/test_api.py index c64761c..56a5b94 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1637,14 +1637,21 @@ def test_apply_changeset(): import exchange_calendars_extensions.core as ece changes = { - 'add': [ - {'date': '2023-01-02', 'type': 'holiday', 'name': INSERTED_HOLIDAY}, - {'date': '2023-05-02', 'type': 'special_open', 'name': "Inserted Special Open", 'time': '11:00'}, - {'date': '2023-03-02', 'type': 'special_close', 'name': "Inserted Special Close", 'time': '14:00'}, - {'date': '2023-08-17', 'type': 'monthly_expiry', 'name': "Inserted Monthly Expiry"}, - {'date': '2023-09-14', 'type': 'quarterly_expiry', 'name': "Inserted Quarterly Expiry"}, - ], - 'remove': ['2023-01-01', '2023-05-01', '2023-03-01', '2023-08-18', '2023-09-15'] + 'add': { + '2023-01-02': {'type': 'holiday', 'name': INSERTED_HOLIDAY}, + '2023-05-02': {'type': 'special_open', 'name': "Inserted Special Open", 'time': '11:00'}, + '2023-03-02': {'type': 'special_close', 'name': "Inserted Special Close", 'time': '14:00'}, + '2023-08-17': {'type': 'monthly_expiry', 'name': "Inserted Monthly Expiry"}, + '2023-09-14': {'type': 'quarterly_expiry', 'name': "Inserted Quarterly Expiry"}, + }, + 'remove': ['2023-01-01', '2023-05-01', '2023-03-01', '2023-08-18', '2023-09-15'], + 'tags': { + '2023-01-02': ['tag1', 'tag2'], + '2023-05-02': ['tag1', 'tag2'], + '2023-03-02': ['tag1', 'tag2'], + '2023-08-17': ['tag1', 'tag2'], + '2023-09-14': ['tag1', 'tag2'], + } } ece.update_calendar("TEST", changes) c = ec.get_calendar("TEST") @@ -1763,14 +1770,21 @@ def test_test(): import exchange_calendars as ec changes = { - 'add': [ - {'date': '2022-01-10', 'type': 'holiday', 'name': 'Holiday'}, - {'date': '2022-01-12', 'type': 'special_open', 'name': 'Special Open', 'time': '10:00'}, - {'date': '2022-01-14', 'type': 'special_close', 'name': 'Special Close', 'time': '16:00'}, - {'date': '2022-01-18', 'type': 'monthly_expiry', 'name': MONTHLY_EXPIRY}, - {'date': '2022-01-20', 'type': 'quarterly_expiry', 'name': QUARTERLY_EXPIRY} - ], - 'remove': ['2022-01-11', '2022-01-13', '2022-01-17', '2022-01-19', '2022-01-21'] + 'add': { + '2022-01-10': {'type': 'holiday', 'name': 'Holiday'}, + '2022-01-12': {'type': 'special_open', 'name': 'Special Open', 'time': '10:00'}, + '2022-01-14': {'type': 'special_close', 'name': 'Special Close', 'time': '16:00'}, + '2022-01-18': {'type': 'monthly_expiry', 'name': MONTHLY_EXPIRY}, + '2022-01-20': {'type': 'quarterly_expiry', 'name': QUARTERLY_EXPIRY} + }, + 'remove': ['2022-01-11', '2022-01-13', '2022-01-17', '2022-01-19', '2022-01-21'], + 'tags': { + '2022-01-10': ['tag1', 'tag2'], + '2022-01-12': ['tag1', 'tag2'], + '2022-01-14': ['tag1', 'tag2'], + '2022-01-18': ['tag1', 'tag2'], + '2022-01-20': ['tag1', 'tag2'], + } } ece.update_calendar('XLON', changes) From c8bc00d11e5ca3db8a6200eb06aa3874306e2f49 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 14:14:29 +0000 Subject: [PATCH 03/21] Bump typing-extensions from 4.9.0 to 4.10.0 Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.9.0 to 4.10.0. 
- [Release notes](https://github.com/python/typing_extensions/releases) - [Changelog](https://github.com/python/typing_extensions/blob/main/CHANGELOG.md) - [Commits](https://github.com/python/typing_extensions/commits) --- updated-dependencies: - dependency-name: typing-extensions dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index aedb27d..6dafabe 100644 --- a/poetry.lock +++ b/poetry.lock @@ -714,13 +714,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] From 18d6a994c68741270e717a4382828b7a53126c27 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 14:15:27 +0000 Subject: [PATCH 04/21] Bump pytest from 8.0.0 to 8.0.2 Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.0.0 to 8.0.2. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/8.0.0...8.0.2) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index aedb27d..87d93a0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -523,13 +523,13 @@ test = ["beautifulsoup4", "flake8", "pytest", "pytest-cov"] [[package]] name = "pytest" -version = "8.0.0" +version = "8.0.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, - {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, + {file = "pytest-8.0.2-py3-none-any.whl", hash = "sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096"}, + {file = "pytest-8.0.2.tar.gz", hash = "sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd"}, ] [package.dependencies] From 88bac072acbe95396bdaabb09f4cd2212cbc47ee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 14:16:56 +0000 Subject: [PATCH 05/21] Bump pydantic from 2.6.0 to 2.6.2 Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.6.0 to 2.6.2. 
- [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v2.6.0...v2.6.2) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 168 ++++++++++++++++++++++++++-------------------------- 1 file changed, 84 insertions(+), 84 deletions(-) diff --git a/poetry.lock b/poetry.lock index aedb27d..0888528 100644 --- a/poetry.lock +++ b/poetry.lock @@ -398,18 +398,18 @@ virtualenv = ">=20.10.0" [[package]] name = "pydantic" -version = "2.6.0" +version = "2.6.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.0-py3-none-any.whl", hash = "sha256:1440966574e1b5b99cf75a13bec7b20e3512e8a61b894ae252f56275e2c465ae"}, - {file = "pydantic-2.6.0.tar.gz", hash = "sha256:ae887bd94eb404b09d86e4d12f93893bdca79d766e738528c6fa1c849f3c6bcf"}, + {file = "pydantic-2.6.2-py3-none-any.whl", hash = "sha256:37a5432e54b12fecaa1049c5195f3d860a10e01bdfd24f1840ef14bd0d3aeab3"}, + {file = "pydantic-2.6.2.tar.gz", hash = "sha256:a09be1c3d28f3abe37f8a78af58284b236a92ce520105ddc91a6d29ea1176ba7"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.1" +pydantic-core = "2.16.3" typing-extensions = ">=4.6.1" [package.extras] @@ -417,90 +417,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.1" +version = "2.16.3" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:300616102fb71241ff477a2cbbc847321dbec49428434a2f17f37528721c4948"}, - {file = "pydantic_core-2.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5511f962dd1b9b553e9534c3b9c6a4b0c9ded3d8c2be96e61d56f933feef9e1f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98f0edee7ee9cc7f9221af2e1b95bd02810e1c7a6d115cfd82698803d385b28f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9795f56aa6b2296f05ac79d8a424e94056730c0b860a62b0fdcfe6340b658cc8"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c45f62e4107ebd05166717ac58f6feb44471ed450d07fecd90e5f69d9bf03c48"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462d599299c5971f03c676e2b63aa80fec5ebc572d89ce766cd11ca8bcb56f3f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ebaa4bf6386a3b22eec518da7d679c8363fb7fb70cf6972161e5542f470798"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:99f9a50b56713a598d33bc23a9912224fc5d7f9f292444e6664236ae471ddf17"}, - {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ec364e280db4235389b5e1e6ee924723c693cbc98e9d28dc1767041ff9bc388"}, - {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:653a5dfd00f601a0ed6654a8b877b18d65ac32c9d9997456e0ab240807be6cf7"}, - {file = "pydantic_core-2.16.1-cp310-none-win32.whl", hash = "sha256:1661c668c1bb67b7cec96914329d9ab66755911d093bb9063c4c8914188af6d4"}, - {file = "pydantic_core-2.16.1-cp310-none-win_amd64.whl", hash = 
"sha256:561be4e3e952c2f9056fba5267b99be4ec2afadc27261505d4992c50b33c513c"}, - {file = "pydantic_core-2.16.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:102569d371fadc40d8f8598a59379c37ec60164315884467052830b28cc4e9da"}, - {file = "pydantic_core-2.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:735dceec50fa907a3c314b84ed609dec54b76a814aa14eb90da31d1d36873a5e"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e83ebbf020be727d6e0991c1b192a5c2e7113eb66e3def0cd0c62f9f266247e4"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30a8259569fbeec49cfac7fda3ec8123486ef1b729225222f0d41d5f840b476f"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920c4897e55e2881db6a6da151198e5001552c3777cd42b8a4c2f72eedc2ee91"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5247a3d74355f8b1d780d0f3b32a23dd9f6d3ff43ef2037c6dcd249f35ecf4c"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5bea8012df5bb6dda1e67d0563ac50b7f64a5d5858348b5c8cb5043811c19d"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed3025a8a7e5a59817b7494686d449ebfbe301f3e757b852c8d0d1961d6be864"}, - {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06f0d5a1d9e1b7932477c172cc720b3b23c18762ed7a8efa8398298a59d177c7"}, - {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:150ba5c86f502c040b822777e2e519b5625b47813bd05f9273a8ed169c97d9ae"}, - {file = "pydantic_core-2.16.1-cp311-none-win32.whl", hash = "sha256:d6cbdf12ef967a6aa401cf5cdf47850559e59eedad10e781471c960583f25aa1"}, - {file = "pydantic_core-2.16.1-cp311-none-win_amd64.whl", hash = "sha256:afa01d25769af33a8dac0d905d5c7bb2d73c7c3d5161b2dd6f8b5b5eea6a3c4c"}, - {file = "pydantic_core-2.16.1-cp311-none-win_arm64.whl", hash = "sha256:1a2fe7b00a49b51047334d84aafd7e39f80b7675cad0083678c58983662da89b"}, - {file = "pydantic_core-2.16.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f478ec204772a5c8218e30eb813ca43e34005dff2eafa03931b3d8caef87d51"}, - {file = "pydantic_core-2.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1936ef138bed2165dd8573aa65e3095ef7c2b6247faccd0e15186aabdda7f66"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99d3a433ef5dc3021c9534a58a3686c88363c591974c16c54a01af7efd741f13"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd88f40f2294440d3f3c6308e50d96a0d3d0973d6f1a5732875d10f569acef49"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fac641bbfa43d5a1bed99d28aa1fded1984d31c670a95aac1bf1d36ac6ce137"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72bf9308a82b75039b8c8edd2be2924c352eda5da14a920551a8b65d5ee89253"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb4363e6c9fc87365c2bc777a1f585a22f2f56642501885ffc7942138499bf54"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f724a023042588d0f4396bbbcf4cffd0ddd0ad3ed4f0d8e6d4ac4264bae81e"}, - {file = 
"pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fb4370b15111905bf8b5ba2129b926af9470f014cb0493a67d23e9d7a48348e8"}, - {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23632132f1fd608034f1a56cc3e484be00854db845b3a4a508834be5a6435a6f"}, - {file = "pydantic_core-2.16.1-cp312-none-win32.whl", hash = "sha256:b9f3e0bffad6e238f7acc20c393c1ed8fab4371e3b3bc311020dfa6020d99212"}, - {file = "pydantic_core-2.16.1-cp312-none-win_amd64.whl", hash = "sha256:a0b4cfe408cd84c53bab7d83e4209458de676a6ec5e9c623ae914ce1cb79b96f"}, - {file = "pydantic_core-2.16.1-cp312-none-win_arm64.whl", hash = "sha256:d195add190abccefc70ad0f9a0141ad7da53e16183048380e688b466702195dd"}, - {file = "pydantic_core-2.16.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:502c062a18d84452858f8aea1e520e12a4d5228fc3621ea5061409d666ea1706"}, - {file = "pydantic_core-2.16.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8c032ccee90b37b44e05948b449a2d6baed7e614df3d3f47fe432c952c21b60"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920f4633bee43d7a2818e1a1a788906df5a17b7ab6fe411220ed92b42940f818"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f5d37ff01edcbace53a402e80793640c25798fb7208f105d87a25e6fcc9ea06"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:399166f24c33a0c5759ecc4801f040dbc87d412c1a6d6292b2349b4c505effc9"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac89ccc39cd1d556cc72d6752f252dc869dde41c7c936e86beac5eb555041b66"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73802194f10c394c2bedce7a135ba1d8ba6cff23adf4217612bfc5cf060de34c"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fa00fa24ffd8c31fac081bf7be7eb495be6d248db127f8776575a746fa55c95"}, - {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:601d3e42452cd4f2891c13fa8c70366d71851c1593ed42f57bf37f40f7dca3c8"}, - {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07982b82d121ed3fc1c51faf6e8f57ff09b1325d2efccaa257dd8c0dd937acca"}, - {file = "pydantic_core-2.16.1-cp38-none-win32.whl", hash = "sha256:d0bf6f93a55d3fa7a079d811b29100b019784e2ee6bc06b0bb839538272a5610"}, - {file = "pydantic_core-2.16.1-cp38-none-win_amd64.whl", hash = "sha256:fbec2af0ebafa57eb82c18c304b37c86a8abddf7022955d1742b3d5471a6339e"}, - {file = "pydantic_core-2.16.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a497be217818c318d93f07e14502ef93d44e6a20c72b04c530611e45e54c2196"}, - {file = "pydantic_core-2.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:694a5e9f1f2c124a17ff2d0be613fd53ba0c26de588eb4bdab8bca855e550d95"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4dfc66abea3ec6d9f83e837a8f8a7d9d3a76d25c9911735c76d6745950e62c"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8655f55fe68c4685673265a650ef71beb2d31871c049c8b80262026f23605ee3"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21e3298486c4ea4e4d5cc6fb69e06fb02a4e22089304308817035ac006a7f506"}, - {file = 
"pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71b4a48a7427f14679f0015b13c712863d28bb1ab700bd11776a5368135c7d60"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dca874e35bb60ce4f9f6665bfbfad050dd7573596608aeb9e098621ac331dc"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa496cd45cda0165d597e9d6f01e36c33c9508f75cf03c0a650018c5048f578e"}, - {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5317c04349472e683803da262c781c42c5628a9be73f4750ac7d13040efb5d2d"}, - {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42c29d54ed4501a30cd71015bf982fa95e4a60117b44e1a200290ce687d3e640"}, - {file = "pydantic_core-2.16.1-cp39-none-win32.whl", hash = "sha256:ba07646f35e4e49376c9831130039d1b478fbfa1215ae62ad62d2ee63cf9c18f"}, - {file = "pydantic_core-2.16.1-cp39-none-win_amd64.whl", hash = "sha256:2133b0e412a47868a358713287ff9f9a328879da547dc88be67481cdac529118"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d25ef0c33f22649b7a088035fd65ac1ce6464fa2876578df1adad9472f918a76"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99c095457eea8550c9fa9a7a992e842aeae1429dab6b6b378710f62bfb70b394"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b49c604ace7a7aa8af31196abbf8f2193be605db6739ed905ecaf62af31ccae0"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56da23034fe66221f2208c813d8aa509eea34d97328ce2add56e219c3a9f41c"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cebf8d56fee3b08ad40d332a807ecccd4153d3f1ba8231e111d9759f02edfd05"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1ae8048cba95f382dba56766525abca438328455e35c283bb202964f41a780b0"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:780daad9e35b18d10d7219d24bfb30148ca2afc309928e1d4d53de86822593dc"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c94b5537bf6ce66e4d7830c6993152940a188600f6ae044435287753044a8fe2"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:adf28099d061a25fbcc6531febb7a091e027605385de9fe14dd6a97319d614cf"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:644904600c15816a1f9a1bafa6aab0d21db2788abcdf4e2a77951280473f33e1"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87bce04f09f0552b66fca0c4e10da78d17cb0e71c205864bab4e9595122cb9d9"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877045a7969ace04d59516d5d6a7dee13106822f99a5d8df5e6822941f7bedc8"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9c46e556ee266ed3fb7b7a882b53df3c76b45e872fdab8d9cf49ae5e91147fd7"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4eebbd049008eb800f519578e944b8dc8e0f7d59a5abb5924cc2d4ed3a1834ff"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:c0be58529d43d38ae849a91932391eb93275a06b93b79a8ab828b012e916a206"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1fc07896fc1851558f532dffc8987e526b682ec73140886c831d773cef44b76"}, - {file = "pydantic_core-2.16.1.tar.gz", hash = "sha256:daff04257b49ab7f4b3f73f98283d3dbb1a65bf3500d55c7beac3c66c310fe34"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, + {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, + {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, + {file = 
"pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, + {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, + {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, + {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, + {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, + {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, + {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, + {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, + {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, + {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, + {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, + {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, ] [package.dependencies] From 437f1c26bda1709c1079f9d4874066f41f40398f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 Apr 2024 14:37:41 +0000 Subject: [PATCH 06/21] Bump typing-extensions from 4.10.0 to 4.11.0 Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.10.0 to 4.11.0. - [Release notes](https://github.com/python/typing_extensions/releases) - [Changelog](https://github.com/python/typing_extensions/blob/main/CHANGELOG.md) - [Commits](https://github.com/python/typing_extensions/compare/4.10.0...4.11.0) --- updated-dependencies: - dependency-name: typing-extensions dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index d666a13..a8c1a57 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "annotated-types" @@ -714,13 +714,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] From c181aacfb5c1073a7d8cccb59f0f44387bcd030e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 23 Apr 2024 15:00:36 +0000 Subject: [PATCH 07/21] Bump pydantic from 2.6.2 to 2.7.1 Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.6.2 to 2.7.1. - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v2.6.2...v2.7.1) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 172 ++++++++++++++++++++++++++-------------------------- 1 file changed, 86 insertions(+), 86 deletions(-) diff --git a/poetry.lock b/poetry.lock index d666a13..3f8ec54 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -398,18 +398,18 @@ virtualenv = ">=20.10.0" [[package]] name = "pydantic" -version = "2.6.2" +version = "2.7.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.2-py3-none-any.whl", hash = "sha256:37a5432e54b12fecaa1049c5195f3d860a10e01bdfd24f1840ef14bd0d3aeab3"}, - {file = "pydantic-2.6.2.tar.gz", hash = "sha256:a09be1c3d28f3abe37f8a78af58284b236a92ce520105ddc91a6d29ea1176ba7"}, + {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"}, + {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.3" +pydantic-core = "2.18.2" typing-extensions = ">=4.6.1" [package.extras] @@ -417,90 +417,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.3" -description = "" +version = "2.18.2" +description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, - {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, - {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, - {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, - {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, - {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, - {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, - {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = 
"sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, - {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, - {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, - {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, - {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, - {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, - {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, + {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"}, + {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"}, + {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"}, + {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"}, + {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"}, + {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"}, + {file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"}, + {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"}, + {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"}, + {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"}, + {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"}, + {file = 
"pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"}, + {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"}, + {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"}, + {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"}, + {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"}, + {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"}, + {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"}, + {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"}, + {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"}, + {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"}, + {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"}, + {file = 
"pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"}, + {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"}, + {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"}, + {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"}, + {file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"}, + {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"}, + {file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"}, + {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"}, + {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"}, + {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"}, + {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"}, + {file = 
"pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"}, + {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"}, ] [package.dependencies] From b797e308a53bd2a27e9ad72d04df38c8bd821337 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Apr 2024 14:12:22 +0000 Subject: [PATCH 08/21] Bump pytest from 8.0.2 to 8.2.0 Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.0.2 to 8.2.0. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/8.0.2...8.2.0) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 22 +++++++++++----------- pyproject.toml | 2 +- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/poetry.lock b/poetry.lock index d666a13..10f72e9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -365,13 +365,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest- [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -523,13 +523,13 @@ test = ["beautifulsoup4", "flake8", "pytest", "pytest-cov"] [[package]] name = "pytest" -version = "8.0.2" +version = "8.2.0" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.0.2-py3-none-any.whl", hash = "sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096"}, - {file = "pytest-8.0.2.tar.gz", hash = "sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd"}, + {file = "pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"}, + {file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"}, ] [package.dependencies] @@ -537,11 +537,11 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.3.0,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.5,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" @@ -757,4 +757,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = "~=3.8" -content-hash = "a84dff8313a9632b8ccfa8bb764df74812fb0aca892e8b3c0919aadbc6cef26d" +content-hash = "2b39b85f595742cf65e8d1ba4a748a43d58df29815d59eded8e2d070b7b5e63f" diff --git a/pyproject.toml b/pyproject.toml index 3200c32..2dc1a41 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,7 @@ typing-extensions = ">=4.0,<5" pydantic = ">=2.0,<3" [tool.poetry.group.dev.dependencies] -pytest = ">=7.3.1,<8.1.0" +pytest = ">=7.3.1,<8.3.0" pytest-mock = ">=3.11.1,<3.13.0" pytest-cov = "~=4.1.0" pre-commit = ">=3.3.3,<3.6.0" From a9cf87b071b5383af2ed96cfecbc6066297c606a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 May 2024 10:37:51 +0000 Subject: [PATCH 09/21] Bump pytest-cov from 4.1.0 to 5.0.0 Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 4.1.0 to 5.0.0. 
- [Changelog](https://github.com/pytest-dev/pytest-cov/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-cov/compare/v4.1.0...v5.0.0) --- updated-dependencies: - dependency-name: pytest-cov dependency-type: direct:development update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- poetry.lock | 12 ++++++------ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3e310bb..4c7fdd0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -545,13 +545,13 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments [[package]] name = "pytest-cov" -version = "4.1.0" +version = "5.0.0" description = "Pytest plugin for measuring coverage." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, ] [package.dependencies] @@ -559,7 +559,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-mock" @@ -757,4 +757,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = "~=3.8" -content-hash = "2b39b85f595742cf65e8d1ba4a748a43d58df29815d59eded8e2d070b7b5e63f" +content-hash = "e3d9e42a183329bec5a5dd23cd93b2485c4c689ccccfa14d273e7e19991bd131" diff --git a/pyproject.toml b/pyproject.toml index 2dc1a41..e97d28d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,7 +42,7 @@ pydantic = ">=2.0,<3" [tool.poetry.group.dev.dependencies] pytest = ">=7.3.1,<8.3.0" pytest-mock = ">=3.11.1,<3.13.0" -pytest-cov = "~=4.1.0" +pytest-cov = ">=4.1,<5.1" pre-commit = ">=3.3.3,<3.6.0" [tool.pytest.ini_options] From 384c732e9ed4a8c8f93a9290da248aa451c5103a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 May 2024 10:43:53 +0000 Subject: [PATCH 10/21] Bump pytest-mock from 3.12.0 to 3.14.0 Bumps [pytest-mock](https://github.com/pytest-dev/pytest-mock) from 3.12.0 to 3.14.0. - [Release notes](https://github.com/pytest-dev/pytest-mock/releases) - [Changelog](https://github.com/pytest-dev/pytest-mock/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest-mock/compare/v3.12.0...v3.14.0) --- updated-dependencies: - dependency-name: pytest-mock dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4c7fdd0..36d9574 100644 --- a/poetry.lock +++ b/poetry.lock @@ -563,17 +563,17 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] @@ -757,4 +757,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = "~=3.8" -content-hash = "e3d9e42a183329bec5a5dd23cd93b2485c4c689ccccfa14d273e7e19991bd131" +content-hash = "309a36a89b14b9a01b53be3abc2050186122152488fa49b1c4296d0593949f3b" diff --git a/pyproject.toml b/pyproject.toml index e97d28d..9dd4cd7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ pydantic = ">=2.0,<3" [tool.poetry.group.dev.dependencies] pytest = ">=7.3.1,<8.3.0" -pytest-mock = ">=3.11.1,<3.13.0" +pytest-mock = ">=3.11.1,<3.15.0" pytest-cov = ">=4.1,<5.1" pre-commit = ">=3.3.3,<3.6.0" From 9760ef745f9e8bef5e14bb9406e3af873de78a60 Mon Sep 17 00:00:00 2001 From: Jens Keiner Date: Tue, 7 May 2024 13:02:41 +0200 Subject: [PATCH 11/21] Remove obsolete commentary. --- .../core/__init__.py | 20 ------------------- 1 file changed, 20 deletions(-) diff --git a/exchange_calendars_extensions/core/__init__.py b/exchange_calendars_extensions/core/__init__.py index 2cc643f..30f2657 100644 --- a/exchange_calendars_extensions/core/__init__.py +++ b/exchange_calendars_extensions/core/__init__.py @@ -328,11 +328,6 @@ def _remove_day(cs: ChangeSet, date: TimestampLike) -> ChangeSet: ------- ChangeSet The changeset with the removed day. - - Raises - ------ - ValidationError - If strict is True and the changeset for the exchange would be inconsistent after removing the day. """ return cs.remove_day(date) @@ -352,11 +347,6 @@ def remove_day(exchange: str, date: TimestampLike) -> None: Returns ------- None - - Raises - ------ - ValidationError - If strict is True and the changeset for the exchange would be inconsistent after removing the day. """ _remove_day(exchange, date) @@ -379,11 +369,6 @@ def _set_tags(cs: ChangeSet, date: TimestampLike, tags: Tags) -> ChangeSet: ------- ChangeSet The changeset with the given tags set for the given day. - - Raises - ------ - ValidationError - If strict is True and the changeset for the exchange would be inconsistent after removing the day. """ return cs.remove_day(date) @@ -405,11 +390,6 @@ def set_tags(exchange: str, date: TimestampLike, tags: Tags) -> None: Returns ------- None - - Raises - ------ - ValidationError - If strict is True and the changeset for the exchange would be inconsistent after removing the day. 
""" _set_tags(exchange, date, tags) From 48d6221ab485503fb9891677c5534dcadc806bc1 Mon Sep 17 00:00:00 2001 From: Jens Keiner Date: Tue, 7 May 2024 13:07:08 +0200 Subject: [PATCH 12/21] Sync with latest API changes and add metadata APIs. --- docs/calendar_modifications.md | 67 +-- docs/features.md | 19 +- docs/metadata.md | 99 ++++ .../core/__init__.py | 124 ++++- .../core/holiday_calendar.py | 42 +- tests/test_api.py | 426 +++++++++++++----- 6 files changed, 622 insertions(+), 155 deletions(-) create mode 100644 docs/metadata.md diff --git a/docs/calendar_modifications.md b/docs/calendar_modifications.md index fed42bb..64bc771 100644 --- a/docs/calendar_modifications.md +++ b/docs/calendar_modifications.md @@ -12,6 +12,28 @@ menu: Extended exchange calendars provide an API to support modifications at runtime. +## Dates, Times, and Day Types +Calendar modifications are represented using common data types for dates, wall-clock times, and types of special +days. Thanks to Pydantic and custom annotated types, however, the API allows to pass in values in different formats that +will safely be converted into the correct used internally. + +Wherever the API expects a `pandas.Timestamp`, represented by the type `TimestampLike`, it is possible to an actual +`pandas.Timestamp`, a `datetime.date` object, a string in ISO format `YYYY-MM-DD`, or any other valid value that can be +used to initialize a timestamp. Pydantic will validate such calls and enforce the correct data type. + +There is also the special type `DateLike` which is used to represent date-like Timestamps. Such timestamps are +normalized to midnight and are timezone-naive. They represent full days starting at midnight (inclusive) and ending at +midnight (exclusive) of the following day *in the context of the exchange and the corresponding timezone they are used +in*. A `DateLike` timestamp is typically used to specify a date for a specific exchange calendar that has a timezone +attached. + +Similar to timestamps, wall clock times in the form of `datetime.time` are represented by +`TimeLike` to allow passing an actual `datetime.time` or strings in the format +`HH:MM:SS` or `HH:MM`. + +The enumeration type `DayType` represents types of special days, API calls accept either enumeration members or +their string value. For example, `DayType.HOLIDAY` and `'holiday'` can be used equivalently. 
+ ## Adding Special Days The `exchange_calendars_extensions` module provides the following methods for adding special days: @@ -24,7 +46,7 @@ The `exchange_calendars_extensions` module provides the following methods for ad For example, ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() import exchange_calendars as ec @@ -40,7 +62,7 @@ always added as regular holidays, not as ad-hoc holidays, to allow for an indivi Adding special open or close days works similarly, but needs the respective special open or close time: ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() import exchange_calendars as ec @@ -55,7 +77,7 @@ assert '2022-12-28' in calendar.special_opens_all.holidays() A more generic way to add a special day is via `add_day(...)` which takes either a `DaySpec` (holidays, monthly/quarterly expiries) or `DaySpecWithTime` (special open/close days) Pydantic model: ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() import exchange_calendars as ec @@ -73,7 +95,7 @@ The `DayType` enum enumerates all supported special day types. Thanks to Pydantic, an even easier way just uses suitable dictionaries: ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() import exchange_calendars as ec @@ -88,11 +110,11 @@ assert '2022-12-28' in calendar.special_opens_all.holidays() ``` The dictionary format makes it particularly easy to read in changes from an external source like a file. -## Removing Special Sessions +## Removing Special Days To remove a day as a special day (of any type) from a calendar, use `remove_day`. For example, ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() import exchange_calendars as ec @@ -108,22 +130,11 @@ will remove the holiday on 27 December 2022 from the calendar, thus turning this Removing a day via `remove_day(...)` that is not actually a special day, results in no change and does not throw an exception. -## Dates, Times, and Day Types -Thanks to Pydantic, dates, times, and the type of a special day can typically be specified in different formats and will -safely be parsed into the correct data type that is used internally. - -For example, wherever the API expects a date, you may pass in a `pandas.Timestamp`, a `datetime.date` object, or simply -a string in ISO format `YYYY-MM-DD`. Similarly, wall clock times can be passed as `datetime.time` objects or as strings -in the format `HH:MM:SS` or `HH:MM`. - -The enumeration type `ecx.DayType` represents types of special days, API calls accept either enumeration members or -their string value. For example, `ecx.DayType.HOLIDAY` and `'holiday'` can be used equivalently. - ## Visibility of Changes Whenever a calendar has been modified programmatically, the changes are only reflected after obtaining a new exchange calendar instance. ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() import exchange_calendars as ec @@ -165,7 +176,7 @@ exchange. 
When a new calendar instance is created, the changes are applied to th It is also possible to create a changeset separately and then associate it with a particular exchange: ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() import exchange_calendars as ec @@ -183,7 +194,7 @@ assert '2022-12-28' in calendar.holidays_all.holidays() Again, an entire changeset can also be created from a suitably formatted dictionary, making it particularly easy to read in and apply changes from an external source like a file. ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() import exchange_calendars as ec @@ -204,7 +215,7 @@ assert '2022-12-28' in calendar.holidays_all.holidays() The API permits to add and remove the same day as a special day. For example, the following code will add a holiday on 28 December 2022 to the calendar, and then remove the same day as well. ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() import exchange_calendars as ec @@ -220,7 +231,7 @@ more sense in a case where a day is added to change its type of special day. Con holiday for the calendar `XLON` in the original version of the calendar. The following code will change the type of special day to a special open by first removing the day (as a holiday), and then adding it back as a special open day: ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() import exchange_calendars as ec @@ -239,7 +250,7 @@ allows to change the type of special day in an existing calendar from one to ano In fact, internally, each added days is always implicitly also removed from the calendar first, so that it strictly is not necessary (but allowed) to explicitly remove a day, and then adding it back as a different type of special day: ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() import exchange_calendars as ec @@ -259,7 +270,7 @@ As seen above, changesets may contain the same day both in the list of days to a However, changesets enforce consistency and will raise an exception if the same day is added more than once. For example, the following code will raise an exception: ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() ecx.add_holiday('XLON', date='2022-12-28', name='Holiday') @@ -268,7 +279,7 @@ ecx.add_special_open('XLON', date='2022-12-28', name='Special Open', time='11:00 In contrast, removing a day is an idempotent operation, i.e. doing it twice will not raise an exception and keep the corresponding changeset the same as after the first removal. ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() ecx.remove_day('XLON', date='2022-12-27') @@ -281,7 +292,7 @@ It is sometimes necessary to revert individual changes made to a calendar. 
To th `reset_day`: ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() import exchange_calendars as ec @@ -306,7 +317,7 @@ assert '2022-12-28' not in calendar.holidays_all.holidays() To reset an entire calendar to its original state, use the method `reset_calendar` or update the calendar with an empty ChangeSet: ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() import exchange_calendars as ec @@ -331,7 +342,7 @@ assert '2022-12-28' not in calendar.holidays_all.holidays() ## Retrieving Changes For any calendar, it is possible to retrieve a copy of the associated changeset: ```python -import exchange_calendars_extensions as ecx +import exchange_calendars_extensions.core as ecx ecx.apply_extensions() ecx.add_holiday('XLON', date='2022-12-28', name='Holiday') diff --git a/docs/features.md b/docs/features.md index 2b239a3..fe24aab 100644 --- a/docs/features.md +++ b/docs/features.md @@ -63,14 +63,14 @@ Calendars for expiry day sessions are currently only available for the following {{% /note %}} ## Calendar Modifications -This package also adds the ability to modify existing calendars at runtime. This can be used to add or remove +It is also possible to modify existing calendars at runtime. This can be used to add or remove - holidays (regular and ad-hoc) - special open days (regular and ad-hoc) - special close days (regular and ad-hoc) - quarterly expiry days - monthly expiry days -This is useful to fix incorrect information from `exchange-calendars`. This regularly happens, e.g., when an +This may be useful to fix incorrect information from `exchange-calendars`. This regularly happens, e.g., when an exchange announces a change to the regular trading schedule on short notice and an updated release of the upstream package is not yet available. After some time, modifications can typically be removed when the upstream package has been updated. @@ -81,3 +81,18 @@ in the [`exchange-calendars`](https://github.com/gerrymanoim/exchange_calendars) calendars should only be used as a last resort and to bridge the time until the information has been updated at the root. {{% /warning %}} + +## Metadata +In some situations, it may be useful to be able to associate arbitrary metadata with certain dates. Here, metadata can +be a set of string tags and/or a string comment. + +{{% note %}} +For example, a tag could be used to mark days on which the exchange +deviated from the regular trading schedule in an unplanned way, e.g. a delayed open due to technical issues. That is, +tags or a comment could be useful to incorporate additional user-owned information that would normally be outside the +scope of the exchange calendars core functionality. +{{% /note %}} + +This package provides functionality to add metadata in the form of tags and/or comments to any date in any calendar. +It is then possible to filter dates by their metadata to retrieve only dates within a certain time period that e.g. +have a certain tag set. diff --git a/docs/metadata.md b/docs/metadata.md new file mode 100644 index 0000000..9400914 --- /dev/null +++ b/docs/metadata.md @@ -0,0 +1,99 @@ +--- +title: "Metadata" +draft: false +type: docs +layout: "single" + +menu: + docs_extensions: + weight: 60 +--- +# Metadata + +Metadata in the form of tags and comments can be associated with specific dates. Metadata can be a combination of a +single string comment and/or a set of string tags. 
For example, +```python +import pandas as pd + +import exchange_calendars_extensions.core as ecx +ecx.apply_extensions() +import exchange_calendars as ec + +# Add metadata. +ecx.set_comment('XLON', '2022-01-01', "This is a comment.") +ecx.set_tags('XLON', '2022-01-01', {'tag1', 'tag2'}) + +calendar = ec.get_calendar('XLON') + +# Get metadata. +meta_dict = calendar.meta() +print(len(meta_dict)) + +# The value for the first and only date. +meta = meta_dict[pd.Timestamp('2022-01-01')] + +print(meta.comment) +print(meta.tags) +``` +will print +```text +1 +This is a comment. +{'tag1', 'tag2'} +``` + +The `meta()` method returns an ordered dictionary of type `Dict[pd.Timestamp, DayMeta]` that contains all days that have +metadata associated with them, ordered by date. The keys are `DateLike` timezone-naive Pandas timestamps normalized to +midnight. Each timestamp represents a full day starting at midnight (inclusive) and ending at midnight (exclusive) of +the following day within the relevant timezone for the exchange. + + +{{% note %}} +Currently, the dictionary returned by the `meta()` method does not support lookup from values other than +`pandas.Timestamp`. This means that it is not possible to look up metadata for a specific date using a string. +{{% /note %}} + +Dates can be filtered by a start and an end timestamp. For example, +```python +import pandas as pd + +import exchange_calendars_extensions.core as ecx +ecx.apply_extensions() +import exchange_calendars as ec + +# Add metadata for two dates. +ecx.set_comment('XLON', '2022-01-01', "This is a comment.") +ecx.set_tags('XLON', '2022-01-01', {'tag1', 'tag2'}) +ecx.set_comment('XLON', '2022-01-02', "This is another comment.") +ecx.set_tags('XLON', '2022-01-02', {'tag3', 'tag4'}) + +calendar = ec.get_calendar('XLON') + +# Get metadata only for 2022-01-01. +meta_dict = calendar.meta(start='2022-01-01', end='2022-01-01') +print(len(meta_dict)) + +# The value for the first and only date. +meta = meta_dict[pd.Timestamp('2022-01-01')] + +print(meta.comment) +print(meta.tags) +``` +will print +```text +1 +This is a comment. +{'tag1', 'tag2'} +``` + +The `meta()` method supports `TimestampLike` start and end arguments which must be either both timezone-naive or +timezone-aware. Otherwise, a `ValueError` is raised. + +The returned dictionary includes all days with metadata that overlap with the period between the start and end +timestamps. This definition ensures that the result is the expected even in situations where the passed in start and end +timestamps are not aligned to midnight. In the above example, if start were `2022-01-01 06:00:00` and end were +`2022-01-01 18:00:00`, the result would be the same since the time period that represents the full day `2022-01-01` +overlaps with the period between start and end. + +The start and end timestamps can also be timezone-aware. In this case, the time period that represents a day with +metadata is always interpreted in the timezone of the corresponding exchange. 
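+
+As a minimal sketch of the timezone-aware case, assuming the same metadata as in the first example and XLON's exchange
+timezone (Europe/London), passing timezone-aware start and end timestamps returns the days whose full-day periods,
+interpreted in the exchange timezone, overlap the given range:
+```python
+import pandas as pd
+
+import exchange_calendars_extensions.core as ecx
+ecx.apply_extensions()
+import exchange_calendars as ec
+
+# Add metadata.
+ecx.set_comment('XLON', '2022-01-01', "This is a comment.")
+ecx.set_tags('XLON', '2022-01-01', {'tag1', 'tag2'})
+
+calendar = ec.get_calendar('XLON')
+
+# Both bounds are timezone-aware. The day 2022-01-01, interpreted in the
+# calendar's own timezone, overlaps this range and is therefore included.
+meta_dict = calendar.meta(start=pd.Timestamp('2022-01-01 06:00:00', tz='Europe/London'),
+                          end=pd.Timestamp('2022-01-01 18:00:00', tz='Europe/London'))
+print(len(meta_dict))  # 1
+```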
diff --git a/exchange_calendars_extensions/core/__init__.py b/exchange_calendars_extensions/core/__init__.py index 30f2657..7440ada 100644 --- a/exchange_calendars_extensions/core/__init__.py +++ b/exchange_calendars_extensions/core/__init__.py @@ -30,8 +30,8 @@ from pydantic import validate_call, BaseModel, conint from typing_extensions import ParamSpec, Concatenate -from exchange_calendars_extensions.api.changes import (ChangeSet, ChangeSetDict, DayType, TimestampLike, DayPropsLike, - Tags, TimeLike) +from exchange_calendars_extensions.api.changes import (ChangeSet, ChangeSetDict, DayType, DateLike, DayPropsLike, + Tags, TimeLike, DayMeta) from exchange_calendars_extensions.core.holiday_calendar import (extend_class, ExtendedExchangeCalendar, ExchangeCalendarExtensions) @@ -260,7 +260,7 @@ def wrapper(exchange: str, *args: P.args, **kwargs: P.kwargs) -> None: @_with_changeset -def _add_day(cs: ChangeSet, date: TimestampLike, props: DayPropsLike) -> ChangeSet: +def _add_day(cs: ChangeSet, date: DateLike, props: DayPropsLike) -> ChangeSet: """ Add a day of a given type to the changeset for a given exchange calendar. @@ -287,7 +287,7 @@ def _add_day(cs: ChangeSet, date: TimestampLike, props: DayPropsLike) -> ChangeS @validate_call(config={'arbitrary_types_allowed': True}) -def add_day(exchange: str, date: TimestampLike, props: DayPropsLike) -> None: +def add_day(exchange: str, date: DateLike, props: DayPropsLike) -> None: """ Add a day of a given type to the given exchange calendar. @@ -313,7 +313,7 @@ def add_day(exchange: str, date: TimestampLike, props: DayPropsLike) -> None: @_with_changeset -def _remove_day(cs: ChangeSet, date: TimestampLike) -> ChangeSet: +def _remove_day(cs: ChangeSet, date: DateLike) -> ChangeSet: """ Remove a day of a given type from the changeset for a given exchange calendar. @@ -333,7 +333,7 @@ def _remove_day(cs: ChangeSet, date: TimestampLike) -> ChangeSet: @validate_call(config={'arbitrary_types_allowed': True}) -def remove_day(exchange: str, date: TimestampLike) -> None: +def remove_day(exchange: str, date: DateLike) -> None: """ Remove a day of a given type from the given exchange calendar. @@ -352,7 +352,7 @@ def remove_day(exchange: str, date: TimestampLike) -> None: @_with_changeset -def _set_tags(cs: ChangeSet, date: TimestampLike, tags: Tags) -> ChangeSet: +def _set_tags(cs: ChangeSet, date: DateLike, tags: Tags) -> ChangeSet: """ Set tags for a given day in the given exchange calendar. @@ -370,11 +370,11 @@ def _set_tags(cs: ChangeSet, date: TimestampLike, tags: Tags) -> ChangeSet: ChangeSet The changeset with the given tags set for the given day. """ - return cs.remove_day(date) + return cs.set_tags(date, tags) @validate_call(config={'arbitrary_types_allowed': True}) -def set_tags(exchange: str, date: TimestampLike, tags: Tags) -> None: +def set_tags(exchange: str, date: DateLike, tags: Tags) -> None: """ Set tags for a given day in the given exchange calendar. @@ -395,7 +395,93 @@ def set_tags(exchange: str, date: TimestampLike, tags: Tags) -> None: @_with_changeset -def _reset_day(cs: ChangeSet, date: TimestampLike, include_tags: bool) -> ChangeSet: +def _set_comment(cs: ChangeSet, date: DateLike, comment: Union[str, None]) -> ChangeSet: + """ + Set comment for a given day in the given exchange calendar. + + Parameters + ---------- + cs : ChangeSet + The changeset where to set the tags. + date : TimestampLike + The date for which to set the tags. + comment : str + The comment to set. 
+ + Returns + ------- + ChangeSet + The changeset with the given comment set for the given day. + """ + return cs.set_comment(date, comment) + + +@validate_call(config={'arbitrary_types_allowed': True}) +def set_comment(exchange: str, date: DateLike, comment: Union[str, None]) -> None: + """ + Set tags for a given day in the given exchange calendar. + + Parameters + ---------- + exchange : str + The exchange for which to set the tags. + date : TimestampLike + The date for which to set the tags. + comment : str + The comment to set. + + Returns + ------- + None + """ + _set_comment(exchange, date, comment) + + +@_with_changeset +def _set_meta(cs: ChangeSet, date: DateLike, meta: Union[DayMeta, None]) -> ChangeSet: + """ + Set metadata for a given day in the given exchange calendar. + + Parameters + ---------- + cs : ChangeSet + The changeset where to set the tags. + date : TimestampLike + The date for which to set the tags. + meta : DayMeta + The metadata to set. + + Returns + ------- + ChangeSet + The changeset with the given metadata set for the given day. + """ + return cs.set_meta(date, meta) + + +@validate_call(config={'arbitrary_types_allowed': True}) +def set_meta(exchange: str, date: DateLike, meta: Union[DayMeta, None]) -> None: + """ + Set metadata for a given day in the given exchange calendar. + + Parameters + ---------- + exchange : str + The exchange for which to set the tags. + date : TimestampLike + The date for which to set the tags. + meta : DayMeta + The metadata to set. + + Returns + ------- + None + """ + _set_meta(exchange, date, meta) + + +@_with_changeset +def _reset_day(cs: ChangeSet, date: DateLike, include_tags: bool) -> ChangeSet: """ Clear a day of a given type from the changeset for a given exchange calendar. @@ -417,7 +503,7 @@ def _reset_day(cs: ChangeSet, date: TimestampLike, include_tags: bool) -> Change @validate_call(config={'arbitrary_types_allowed': True}) -def reset_day(exchange: str, date: TimestampLike, include_tags: bool = False) -> None: +def reset_day(exchange: str, date: DateLike, include_tags: bool = False) -> None: """ Clear a day of a given type from the given exchange calendar. @@ -437,7 +523,7 @@ def reset_day(exchange: str, date: TimestampLike, include_tags: bool = False) -> _reset_day(exchange, date, include_tags=include_tags) -def add_holiday(exchange: str, date: TimestampLike, name: str = "Holiday") -> None: +def add_holiday(exchange: str, date: DateLike, name: str = "Holiday") -> None: """ Add a holiday to an exchange calendar. @@ -462,7 +548,7 @@ def add_holiday(exchange: str, date: TimestampLike, name: str = "Holiday") -> No _add_day(exchange, date, {'type': DayType.HOLIDAY, 'name': name}) -def add_special_open(exchange: str, date: TimestampLike, time: TimeLike, name: str = "Special Open") -> None: +def add_special_open(exchange: str, date: DateLike, time: TimeLike, name: str = "Special Open") -> None: """ Add a special open to an exchange calendar. @@ -489,7 +575,7 @@ def add_special_open(exchange: str, date: TimestampLike, time: TimeLike, name: s _add_day(exchange, date, {'type': DayType.SPECIAL_OPEN, 'name': name, 'time': time}) -def add_special_close(exchange: str, date: TimestampLike, time: TimeLike, name: str = "Special Close") -> None: +def add_special_close(exchange: str, date: DateLike, time: TimeLike, name: str = "Special Close") -> None: """ Add a special close to an exchange calendar. 
@@ -516,7 +602,7 @@ def add_special_close(exchange: str, date: TimestampLike, time: TimeLike, name: _add_day(exchange, date, {'type': DayType.SPECIAL_CLOSE, 'name': name, 'time': time}) -def add_quarterly_expiry(exchange: str, date: TimestampLike, name: str = "Quarterly Expiry") -> None: +def add_quarterly_expiry(exchange: str, date: DateLike, name: str = "Quarterly Expiry") -> None: """ Add a quarterly expiry to an exchange calendar. @@ -541,7 +627,7 @@ def add_quarterly_expiry(exchange: str, date: TimestampLike, name: str = "Quarte _add_day(exchange, date, {'type': DayType.QUARTERLY_EXPIRY, 'name': name}) -def add_monthly_expiry(exchange: str, date: TimestampLike, name: str = "Monthly Expiry") -> None: +def add_monthly_expiry(exchange: str, date: DateLike, name: str = "Monthly Expiry") -> None: """ Add a monthly expiry to an exchange calendar. @@ -673,9 +759,9 @@ def get_changes_for_all_calendars() -> ChangeSetDict: # Declare public names. __all__ = ["apply_extensions", "remove_extensions", "register_extension", "extend_class", "DayType", "add_day", "remove_day", "reset_day", "DayPropsLike", "add_holiday", "add_special_close", "add_special_open", - "add_quarterly_expiry", "add_monthly_expiry", "reset_calendar", "reset_all_calendars", "update_calendar", - "get_changes_for_calendar", "get_changes_for_all_calendars", "ChangeSet", "ExtendedExchangeCalendar", - "ExchangeCalendarExtensions"] + "add_quarterly_expiry", "add_monthly_expiry", "set_meta", "reset_calendar", "reset_all_calendars", + "update_calendar", "get_changes_for_calendar", "get_changes_for_all_calendars", "ChangeSet", + "ExtendedExchangeCalendar", "ExchangeCalendarExtensions"] __version__ = None diff --git a/exchange_calendars_extensions/core/holiday_calendar.py b/exchange_calendars_extensions/core/holiday_calendar.py index 6ae1190..119cfba 100644 --- a/exchange_calendars_extensions/core/holiday_calendar.py +++ b/exchange_calendars_extensions/core/holiday_calendar.py @@ -1,5 +1,6 @@ import datetime from abc import ABC +from collections import OrderedDict from copy import copy from dataclasses import field, dataclass from functools import reduce @@ -11,11 +12,16 @@ from exchange_calendars.pandas_extensions.holiday import Holiday from exchange_calendars.pandas_extensions.holiday import Holiday as ExchangeCalendarsHoliday from pandas.tseries.holiday import Holiday as PandasHoliday +from pydantic import validate_call +from typing_extensions import Dict -from exchange_calendars_extensions.api.changes import ChangeSet, DayType +from exchange_calendars_extensions.api.changes import ChangeSet, DayType, DayMeta, TimestampLike from exchange_calendars_extensions.core.holiday import get_monthly_expiry_holiday, DayOfWeekPeriodicHoliday, \ get_last_day_of_month_holiday +# Timdelta that represents a day minus the smallest increment of time. +ONE_DAY_MINUS_EPS = pd.Timedelta(1, 'd') - pd.Timedelta(1, 'ns') + class HolidayCalendar(ExchangeCalendarsHolidayCalendar): """ @@ -474,8 +480,7 @@ def last_regular_trading_days_of_months(self) -> Union[ExchangeCalendarsHolidayC """ ... - @property - def tags(self): + def meta(self, start: Union[TimestampLike, None] = None, end: Union[TimestampLike, None] = None) -> Dict[pd.Timestamp, DayMeta]: ... @@ -826,6 +831,9 @@ def __init__(self, *args, **kwargs): self._adjusted_properties = a + # Save meta. + self._meta = changeset.meta if changeset is not None else {} + # Call upstream init method. 
init_orig(self, *args, **kwargs) @@ -930,6 +938,31 @@ def last_regular_trading_days_of_months(self) -> Union[ExchangeCalendarsHolidayC other=self._holidays_and_special_business_days_shared, weekmask=self.weekmask, roll_fn=roll_one_day_same_month) + @validate_call(config={'arbitrary_types_allowed': True}) + def meta(self, start: Union[TimestampLike, None] = None, end: Union[TimestampLike, None] = None) -> Dict[pd.Timestamp, DayMeta]: + # Check that when start and end are both given, they are both timezone-aware or both timezone-naive. + if start and end: + if bool(start.tz) != bool(end.tz): + raise ValueError("start and end must both be timezone-aware or both timezone-naive.") + + if start > end: + raise ValueError("start must be less than or equal to end.") + + # Get timezone from start or end, if given. + tz = (start and start.tz) or (end and end.tz) or None + + # Return a dictionary with all metadata for days in the given range. A day is considered to comprise the full + # period between midnight (inclusive) and the next midnight (exclusive). If that period overlaps with the given + # range, the day is included in the result. + # + # Note: The code assumes that ONE_DAY_MINUS_EPS gets applied to timezone-naive timestamps where it corresponds + # to (almost) a calendar day. The same may not be true for timezone-aware timestamps when the period includes + # e.g. a DST transition. + if tz: + return OrderedDict([(k, v) for k, v in self._meta.items() if (start is None or (k + ONE_DAY_MINUS_EPS).tz_localize(tz=self.tz) >= start) and (end is None or (k.tz_localize(tz=self.tz)) <= end)]) + else: + return OrderedDict([(k, v) for k, v in self._meta.items() if (start is None or (k + ONE_DAY_MINUS_EPS) >= start) and (end is None or k <= end)]) + # Use type to create a new class. extended = type(cls.__name__ + "Extended", (cls, ExtendedExchangeCalendar), { "__init__": __init__, @@ -946,7 +979,8 @@ def last_regular_trading_days_of_months(self) -> Union[ExchangeCalendarsHolidayC "monthly_expiries": monthly_expiries, "quarterly_expiries": quarterly_expiries, "last_trading_days_of_months": last_trading_days_of_months, - "last_regular_trading_days_of_months": last_regular_trading_days_of_months + "last_regular_trading_days_of_months": last_regular_trading_days_of_months, + 'meta': meta, }) return extended diff --git a/tests/test_api.py b/tests/test_api.py index 56a5b94..f6ccbbe 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,4 +1,5 @@ import datetime +from collections import OrderedDict from datetime import time from typing import Optional, Tuple, Iterable, Union @@ -9,6 +10,8 @@ from pandas.tseries.holiday import next_monday from pytz import timezone +from exchange_calendars_extensions.api.changes import DayMeta + HOLIDAY_0 = 'Holiday 0' SPECIAL_OPEN_0 = 'Special Open 0' SPECIAL_CLOSE_0 = 'Special Close 0' @@ -28,8 +31,8 @@ def apply_extensions(): """ Apply the extensions to the exchange_calendars module. 
""" - import exchange_calendars_extensions.core as ece - ece.apply_extensions() + import exchange_calendars_extensions.core as ecx + ecx.apply_extensions() def add_test_calendar_and_apply_extensions(holidays: Optional[Iterable[pd.Timestamp]] = (pd.Timestamp("2023-01-01"),), @@ -119,17 +122,17 @@ def weekmask(self): ec.register_calendar_type("TEST", TestCalendar) - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.register_extension("TEST", TestCalendar, day_of_week_expiry=day_of_week_expiry) + ecx.register_extension("TEST", TestCalendar, day_of_week_expiry=day_of_week_expiry) - ece.apply_extensions() + ecx.apply_extensions() @pytest.mark.isolated def test_unmodified_calendars(): """ Test that calendars are unmodified when the module is just imported, without calling apply_extensions() """ - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx import exchange_calendars as ec c = ec.get_calendar("XETR") @@ -138,8 +141,8 @@ def test_unmodified_calendars(): assert isinstance(c, ec.ExchangeCalendar) # Check if returned Calendar is not of extended type. - assert not isinstance(c, ece.ExtendedExchangeCalendar) - assert not isinstance(c, ece.ExchangeCalendarExtensions) + assert not isinstance(c, ecx.ExtendedExchangeCalendar) + assert not isinstance(c, ecx.ExchangeCalendarExtensions) @pytest.mark.isolated @@ -147,14 +150,14 @@ def test_apply_extensions(): """ Test that calendars are modified when apply_extensions() is called """ apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx c = ec.get_calendar("XETR") # Check if returned Calendar is of expected types. 
assert isinstance(c, ec.ExchangeCalendar) - assert isinstance(c, ece.ExtendedExchangeCalendar) - assert isinstance(c, ece.ExchangeCalendarExtensions) + assert isinstance(c, ecx.ExtendedExchangeCalendar) + assert isinstance(c, ecx.ExchangeCalendarExtensions) @pytest.mark.isolated @@ -196,11 +199,11 @@ def test_extended_calendar_xetr(): def test_extended_calendar_test(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx c = ec.get_calendar("TEST") - assert isinstance(c, ece.ExtendedExchangeCalendar) + assert isinstance(c, ecx.ExtendedExchangeCalendar) start = pd.Timestamp("2022-01-01") end = pd.Timestamp("2024-12-31") @@ -391,9 +394,9 @@ def test_extended_calendar_test(): def test_add_new_holiday(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_holiday("TEST", pd.Timestamp("2023-07-03"), ADDED_HOLIDAY) + ecx.add_holiday("TEST", pd.Timestamp("2023-07-03"), ADDED_HOLIDAY) c = ec.get_calendar("TEST") @@ -423,9 +426,9 @@ def test_add_new_holiday(): def test_overwrite_existing_regular_holiday(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_holiday("TEST", pd.Timestamp("2023-01-01"), ADDED_HOLIDAY) + ecx.add_holiday("TEST", pd.Timestamp("2023-01-01"), ADDED_HOLIDAY) c = ec.get_calendar("TEST") @@ -453,9 +456,9 @@ def test_overwrite_existing_regular_holiday(): def test_overwrite_existing_adhoc_holiday(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_holiday("TEST", pd.Timestamp("2023-02-01"), ADDED_HOLIDAY) + ecx.add_holiday("TEST", pd.Timestamp("2023-02-01"), ADDED_HOLIDAY) c = ec.get_calendar("TEST") @@ -484,9 +487,9 @@ def test_overwrite_existing_adhoc_holiday(): def test_remove_existing_regular_holiday(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.remove_day("TEST", pd.Timestamp("2023-01-01")) + ecx.remove_day("TEST", pd.Timestamp("2023-01-01")) c = ec.get_calendar("TEST") @@ -512,9 +515,9 @@ def test_remove_existing_regular_holiday(): def test_remove_existing_adhoc_holiday(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.remove_day("TEST", pd.Timestamp("2023-02-01")) + ecx.remove_day("TEST", pd.Timestamp("2023-02-01")) c = ec.get_calendar("TEST") @@ -541,9 +544,9 @@ def test_remove_existing_adhoc_holiday(): def test_remove_non_existent_holiday(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.remove_day("TEST", pd.Timestamp("2023-07-03")) + ecx.remove_day("TEST", pd.Timestamp("2023-07-03")) c = ec.get_calendar("TEST") @@ -571,11 +574,11 @@ def test_remove_non_existent_holiday(): def test_add_and_remove_new_holiday(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import 
exchange_calendars_extensions.core as ecx # Add and then remove the same day. The day should stay added. - ece.add_holiday("TEST", pd.Timestamp("2023-07-03"), ADDED_HOLIDAY) - ece.remove_day("TEST", pd.Timestamp("2023-07-03")) + ecx.add_holiday("TEST", pd.Timestamp("2023-07-03"), ADDED_HOLIDAY) + ecx.remove_day("TEST", pd.Timestamp("2023-07-03")) c = ec.get_calendar("TEST") @@ -605,11 +608,11 @@ def test_add_and_remove_new_holiday(): def test_add_and_remove_existing_holiday(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx # Add and then remove the same existing holiday. The day should still be added. - ece.add_holiday("TEST", pd.Timestamp("2023-01-01"), ADDED_HOLIDAY) - ece.remove_day("TEST", pd.Timestamp("2023-01-01")) + ecx.add_holiday("TEST", pd.Timestamp("2023-01-01"), ADDED_HOLIDAY) + ecx.remove_day("TEST", pd.Timestamp("2023-01-01")) c = ec.get_calendar("TEST") @@ -637,12 +640,12 @@ def test_add_and_remove_existing_holiday(): def test_remove_and_add_new_holiday(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx # Remove and then add the same new holiday. The removal of a non-existent holiday should be ignored, so the day # should be added eventually. - ece.remove_day("TEST", pd.Timestamp("2023-07-03")) - ece.add_holiday("TEST", pd.Timestamp("2023-07-03"), ADDED_HOLIDAY) + ecx.remove_day("TEST", pd.Timestamp("2023-07-03")) + ecx.add_holiday("TEST", pd.Timestamp("2023-07-03"), ADDED_HOLIDAY) c = ec.get_calendar("TEST") @@ -672,12 +675,12 @@ def test_remove_and_add_new_holiday(): def test_remove_and_add_existing_regular_holiday(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx # Remove and then add the same existent holiday. This should be equivalent to just adding (and thereby overwriting) # the existing regular holiday. - ece.remove_day("TEST", pd.Timestamp("2023-01-01")) - ece.add_holiday("TEST", pd.Timestamp("2023-01-01"), ADDED_HOLIDAY) + ecx.remove_day("TEST", pd.Timestamp("2023-01-01")) + ecx.add_holiday("TEST", pd.Timestamp("2023-01-01"), ADDED_HOLIDAY) c = ec.get_calendar("TEST") @@ -705,12 +708,12 @@ def test_remove_and_add_existing_regular_holiday(): def test_remove_and_add_existing_adhoc_holiday(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx # Remove and then add the same existent holiday. This should be equivalent to just adding (and thereby overwriting) # the existing regular holiday. 
- ece.remove_day("TEST", pd.Timestamp("2023-02-01")) - ece.add_holiday("TEST", pd.Timestamp("2023-02-01"), ADDED_HOLIDAY) + ecx.remove_day("TEST", pd.Timestamp("2023-02-01")) + ecx.add_holiday("TEST", pd.Timestamp("2023-02-01"), ADDED_HOLIDAY) c = ec.get_calendar("TEST") @@ -739,9 +742,9 @@ def test_remove_and_add_existing_adhoc_holiday(): def test_add_new_special_open_with_new_time(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_special_open("TEST", pd.Timestamp("2023-07-03"), time(12, 0), ADDED_SPECIAL_OPEN) + ecx.add_special_open("TEST", pd.Timestamp("2023-07-03"), time(12, 0), ADDED_SPECIAL_OPEN) c = ec.get_calendar("TEST") @@ -779,9 +782,9 @@ def test_add_new_special_open_with_new_time(): def test_add_new_special_open_with_existing_time(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_special_open("TEST", pd.Timestamp("2023-07-03"), time(11, 0), ADDED_SPECIAL_OPEN) + ecx.add_special_open("TEST", pd.Timestamp("2023-07-03"), time(11, 0), ADDED_SPECIAL_OPEN) c = ec.get_calendar("TEST") @@ -815,9 +818,9 @@ def test_add_new_special_open_with_existing_time(): def test_overwrite_existing_regular_special_open_with_new_time(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_special_open("TEST", pd.Timestamp("2023-05-01"), time(12, 0), ADDED_SPECIAL_OPEN) + ecx.add_special_open("TEST", pd.Timestamp("2023-05-01"), time(12, 0), ADDED_SPECIAL_OPEN) c = ec.get_calendar("TEST") @@ -854,7 +857,7 @@ def test_overwrite_existing_regular_special_open_with_existing_time(): add_test_calendar_and_apply_extensions( special_opens=[(time(11, 00), [pd.Timestamp("2023-05-01")]), (time(12, 00), [pd.Timestamp("2023-05-04")])]) import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx c = ec.get_calendar("TEST") @@ -890,7 +893,7 @@ def test_overwrite_existing_regular_special_open_with_existing_time(): pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, pd.Timestamp("2024-05-06"): SPECIAL_OPEN_0})).empty - ece.add_special_open("TEST", pd.Timestamp("2023-05-01"), time(12, 0), ADDED_SPECIAL_OPEN) + ecx.add_special_open("TEST", pd.Timestamp("2023-05-01"), time(12, 0), ADDED_SPECIAL_OPEN) c = ec.get_calendar("TEST") @@ -928,9 +931,9 @@ def test_overwrite_existing_regular_special_open_with_existing_time(): def test_overwrite_existing_ad_hoc_special_open_with_new_time(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_special_open("TEST", pd.Timestamp("2023-06-01"), time(12, 0), ADDED_SPECIAL_OPEN) + ecx.add_special_open("TEST", pd.Timestamp("2023-06-01"), time(12, 0), ADDED_SPECIAL_OPEN) c = ec.get_calendar("TEST") @@ -966,9 +969,9 @@ def test_overwrite_existing_ad_hoc_special_open_with_new_time(): def test_overwrite_existing_ad_hoc_special_open_with_existing_time(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_special_open("TEST", pd.Timestamp("2023-06-01"), time(11, 0), 
ADDED_SPECIAL_OPEN) + ecx.add_special_open("TEST", pd.Timestamp("2023-06-01"), time(11, 0), ADDED_SPECIAL_OPEN) c = ec.get_calendar("TEST") @@ -1002,9 +1005,9 @@ def test_remove_existing_regular_special_open(): add_test_calendar_and_apply_extensions( special_opens=[(time(11, 00), [pd.Timestamp("2023-05-01")]), (time(12, 00), [pd.Timestamp("2023-05-04")])]) import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.remove_day("TEST", pd.Timestamp("2023-05-01")) + ecx.remove_day("TEST", pd.Timestamp("2023-05-01")) c = ec.get_calendar("TEST") @@ -1043,9 +1046,9 @@ def test_remove_existing_regular_special_open(): def test_remove_existing_ad_hoc_special_open(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.remove_day("TEST", pd.Timestamp("2023-06-01")) + ecx.remove_day("TEST", pd.Timestamp("2023-06-01")) c = ec.get_calendar("TEST") @@ -1076,9 +1079,9 @@ def test_remove_existing_ad_hoc_special_open(): def test_remove_non_existent_special_open(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.remove_day("TEST", pd.Timestamp("2023-07-03")) + ecx.remove_day("TEST", pd.Timestamp("2023-07-03")) c = ec.get_calendar("TEST") @@ -1110,9 +1113,9 @@ def test_remove_non_existent_special_open(): def test_add_new_special_close_with_new_time(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_special_close("TEST", pd.Timestamp("2023-07-03"), time(15, 0), ADDED_SPECIAL_CLOSE) + ecx.add_special_close("TEST", pd.Timestamp("2023-07-03"), time(15, 0), ADDED_SPECIAL_CLOSE) c = ec.get_calendar("TEST") @@ -1150,9 +1153,9 @@ def test_add_new_special_close_with_new_time(): def test_add_new_special_close_with_existing_time(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_special_close("TEST", pd.Timestamp("2023-07-03"), time(14, 0), ADDED_SPECIAL_CLOSE) + ecx.add_special_close("TEST", pd.Timestamp("2023-07-03"), time(14, 0), ADDED_SPECIAL_CLOSE) c = ec.get_calendar("TEST") @@ -1186,9 +1189,9 @@ def test_add_new_special_close_with_existing_time(): def test_overwrite_existing_regular_special_close_with_new_time(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_special_close("TEST", pd.Timestamp("2023-03-01"), time(15, 0), ADDED_SPECIAL_CLOSE) + ecx.add_special_close("TEST", pd.Timestamp("2023-03-01"), time(15, 0), ADDED_SPECIAL_CLOSE) c = ec.get_calendar("TEST") @@ -1225,7 +1228,7 @@ def test_overwrite_existing_regular_special_close_with_existing_time(): add_test_calendar_and_apply_extensions( special_closes=[(time(14, 00), [pd.Timestamp("2023-03-01")]), (time(15, 00), [pd.Timestamp("2023-03-04")])]) import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx c = ec.get_calendar("TEST") @@ -1261,7 +1264,7 @@ def test_overwrite_existing_regular_special_close_with_existing_time(): 
pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, pd.Timestamp("2024-03-04"): SPECIAL_CLOSE_0})).empty - ece.add_special_close("TEST", pd.Timestamp("2023-03-01"), time(15, 0), ADDED_SPECIAL_CLOSE) + ecx.add_special_close("TEST", pd.Timestamp("2023-03-01"), time(15, 0), ADDED_SPECIAL_CLOSE) c = ec.get_calendar("TEST") @@ -1299,9 +1302,9 @@ def test_overwrite_existing_regular_special_close_with_existing_time(): def test_overwrite_existing_ad_hoc_special_close_with_new_time(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_special_close("TEST", pd.Timestamp("2023-04-03"), time(15, 0), ADDED_SPECIAL_CLOSE) + ecx.add_special_close("TEST", pd.Timestamp("2023-04-03"), time(15, 0), ADDED_SPECIAL_CLOSE) c = ec.get_calendar("TEST") @@ -1337,9 +1340,9 @@ def test_overwrite_existing_ad_hoc_special_close_with_new_time(): def test_overwrite_existing_ad_hoc_special_close_with_existing_time(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_special_close("TEST", pd.Timestamp("2023-04-03"), time(14, 0), ADDED_SPECIAL_CLOSE) + ecx.add_special_close("TEST", pd.Timestamp("2023-04-03"), time(14, 0), ADDED_SPECIAL_CLOSE) c = ec.get_calendar("TEST") @@ -1373,9 +1376,9 @@ def test_remove_existing_regular_special_close(): add_test_calendar_and_apply_extensions( special_closes=[(time(14, 00), [pd.Timestamp("2023-03-01")]), (time(15, 00), [pd.Timestamp("2023-03-04")])]) import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.remove_day("TEST", pd.Timestamp("2023-03-01")) + ecx.remove_day("TEST", pd.Timestamp("2023-03-01")) c = ec.get_calendar("TEST") @@ -1414,9 +1417,9 @@ def test_remove_existing_regular_special_close(): def test_remove_existing_ad_hoc_special_close(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.remove_day("TEST", pd.Timestamp("2023-04-03")) + ecx.remove_day("TEST", pd.Timestamp("2023-04-03")) c = ec.get_calendar("TEST") @@ -1447,9 +1450,9 @@ def test_remove_existing_ad_hoc_special_close(): def test_remove_non_existent_special_close(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.remove_day("TEST", pd.Timestamp("2023-07-03")) + ecx.remove_day("TEST", pd.Timestamp("2023-07-03")) c = ec.get_calendar("TEST") @@ -1481,10 +1484,10 @@ def test_remove_non_existent_special_close(): def test_add_quarterly_expiry(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx # Add quarterly expiry. 
- ece.add_quarterly_expiry("TEST", pd.Timestamp("2023-06-15"), "Added Quarterly Expiry") + ecx.add_quarterly_expiry("TEST", pd.Timestamp("2023-06-15"), "Added Quarterly Expiry") c = ec.get_calendar("TEST") @@ -1512,10 +1515,10 @@ def test_add_quarterly_expiry(): def test_remove_quarterly_expiry(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx # Add quarterly expiry. - ece.remove_day("TEST", pd.Timestamp("2023-06-16")) + ecx.remove_day("TEST", pd.Timestamp("2023-06-16")) c = ec.get_calendar("TEST") @@ -1541,10 +1544,10 @@ def test_remove_quarterly_expiry(): def test_add_monthly_expiry(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx # Add quarterly expiry. - ece.add_monthly_expiry("TEST", pd.Timestamp("2023-01-19"), "Added Monthly Expiry") + ecx.add_monthly_expiry("TEST", pd.Timestamp("2023-01-19"), "Added Monthly Expiry") c = ec.get_calendar("TEST") @@ -1584,9 +1587,9 @@ def test_add_monthly_expiry(): def test_overwrite_regular_holiday_with_special_open(): add_test_calendar_and_apply_extensions(holidays=[pd.Timestamp("2023-01-02")]) import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx - ece.add_special_open("TEST", pd.Timestamp("2023-01-02"), time(11, 0), ADDED_SPECIAL_OPEN) + ecx.add_special_open("TEST", pd.Timestamp("2023-01-02"), time(11, 0), ADDED_SPECIAL_OPEN) c = ec.get_calendar("TEST") @@ -1634,7 +1637,7 @@ def test_overwrite_regular_holiday_with_special_open(): def test_apply_changeset(): add_test_calendar_and_apply_extensions() import exchange_calendars as ec - import exchange_calendars_extensions.core as ece + import exchange_calendars_extensions.core as ecx changes = { 'add': { @@ -1645,18 +1648,16 @@ def test_apply_changeset(): '2023-09-14': {'type': 'quarterly_expiry', 'name': "Inserted Quarterly Expiry"}, }, 'remove': ['2023-01-01', '2023-05-01', '2023-03-01', '2023-08-18', '2023-09-15'], - 'tags': { - '2023-01-02': ['tag1', 'tag2'], - '2023-05-02': ['tag1', 'tag2'], - '2023-03-02': ['tag1', 'tag2'], - '2023-08-17': ['tag1', 'tag2'], - '2023-09-14': ['tag1', 'tag2'], + 'meta': { + '2023-01-03': {'tags': ['tag1', 'tag2']}, + '2023-05-03': {'comment': 'This is a comment'}, + '2023-03-03': {'tags': ['tag3', 'tag´4'], 'comment': 'This is a comment'}, } } - ece.update_calendar("TEST", changes) + ecx.update_calendar("TEST", changes) c = ec.get_calendar("TEST") - assert isinstance(c, ece.ExtendedExchangeCalendar) + assert isinstance(c, ecx.ExtendedExchangeCalendar) start = pd.Timestamp("2022-01-01") end = pd.Timestamp("2024-12-31") @@ -1762,11 +1763,15 @@ def test_apply_changeset(): pd.Timestamp('2024-10-18'): MONTHLY_EXPIRY, pd.Timestamp('2024-11-15'): MONTHLY_EXPIRY})).empty + # Verify tags and comments. 
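+    # A possible check (sketch): assumes c.meta() keys by naive Timestamp and that
+    # DayMeta defaults to tags=[] / comment=None, as exercised in test_set_meta further below.
+    assert c.meta()[pd.Timestamp("2023-01-03")] == DayMeta(tags=["tag1", "tag2"], comment=None)
+    assert c.meta()[pd.Timestamp("2023-05-03")] == DayMeta(tags=[], comment="This is a comment")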
+ + + @pytest.mark.isolated def test_test(): - import exchange_calendars_extensions.core as ece - ece.apply_extensions() + import exchange_calendars_extensions.core as ecx + ecx.apply_extensions() import exchange_calendars as ec changes = { @@ -1778,16 +1783,14 @@ def test_test(): '2022-01-20': {'type': 'quarterly_expiry', 'name': QUARTERLY_EXPIRY} }, 'remove': ['2022-01-11', '2022-01-13', '2022-01-17', '2022-01-19', '2022-01-21'], - 'tags': { - '2022-01-10': ['tag1', 'tag2'], - '2022-01-12': ['tag1', 'tag2'], - '2022-01-14': ['tag1', 'tag2'], - '2022-01-18': ['tag1', 'tag2'], - '2022-01-20': ['tag1', 'tag2'], + 'meta': { + '2022-01-22': {'tags': ['tag1', 'tag2']}, + '2022-01-23': {'comment': 'This is a comment'}, + '2022-01-24': {'tags': ['tag3', 'tag4'], 'comment': 'This is a comment'}, } } - ece.update_calendar('XLON', changes) + ecx.update_calendar('XLON', changes) calendar = ec.get_calendar('XLON') @@ -1915,3 +1918,222 @@ def test_monthly_expiry_rollback_multiple_days(): pd.Timestamp('2022-08-19'): MONTHLY_EXPIRY, pd.Timestamp('2022-10-21'): MONTHLY_EXPIRY, pd.Timestamp('2022-11-18'): MONTHLY_EXPIRY})).empty + + +@pytest.mark.isolated +def test_set_tags(): + add_test_calendar_and_apply_extensions() + + import exchange_calendars as ec + import exchange_calendars_extensions.core as ecx + + ecx.set_tags("TEST", "2023-01-03", ["tag1", "tag2"]) + + c = ec.get_calendar("TEST") + + assert c.meta() == {pd.Timestamp("2023-01-03"): DayMeta(tags=["tag1", "tag2"], comment=None)} + + ecx.set_tags("TEST", "2023-01-03", None) + + c = ec.get_calendar("TEST") + + assert c.meta() == dict() + + ecx.set_tags("TEST", "2023-01-03", []) + + c = ec.get_calendar("TEST") + + assert c.meta() == dict() + + +@pytest.mark.isolated +def test_set_comment(): + add_test_calendar_and_apply_extensions() + + import exchange_calendars as ec + import exchange_calendars_extensions.core as ecx + + ecx.set_comment("TEST", "2023-01-03", "This is a comment") + + c = ec.get_calendar("TEST") + + assert c.meta() == {pd.Timestamp("2023-01-03"): DayMeta(tags=[], comment="This is a comment")} + + ecx.set_comment("TEST", "2023-01-03", None) + + c = ec.get_calendar("TEST") + + assert c.meta() == dict() + + ecx.set_comment("TEST", "2023-01-03", "") + + c = ec.get_calendar("TEST") + + assert c.meta() == dict() + + +@pytest.mark.isolated +def test_set_meta(): + add_test_calendar_and_apply_extensions() + + import exchange_calendars as ec + import exchange_calendars_extensions.core as ecx + + ecx.set_meta("TEST", "2023-01-03", {'comment': 'This is a comment'}) + ecx.set_meta("TEST", "2023-01-04", {'tags': ['tag1', 'tag2']}) + ecx.set_meta("TEST", "2023-01-05", {'tags': ['tag1', 'tag2'], 'comment': 'This is a comment'}) + ecx.set_meta("TEST", "2023-01-06", None) + + c = ec.get_calendar("TEST") + + assert c.meta() == { + pd.Timestamp("2023-01-03"): DayMeta(tags=[], comment="This is a comment"), + pd.Timestamp("2023-01-04"): DayMeta(tags=["tag1", "tag2"], comment=None), + pd.Timestamp("2023-01-05"): DayMeta(tags=["tag1", "tag2"], comment="This is a comment") + } + + ecx.set_meta("TEST", "2023-01-03", None) + ecx.set_meta("TEST", "2023-01-04", None) + ecx.set_meta("TEST", "2023-01-05", None) + + c = ec.get_calendar("TEST") + + assert c.meta() == dict() + + +@pytest.mark.isolated +def test_get_meta(): + add_test_calendar_and_apply_extensions() + + import exchange_calendars as ec + import exchange_calendars_extensions.core as ecx + + ecx.set_meta("TEST", "2023-01-03", {'comment': 'This is a comment'}) + ecx.set_meta("TEST", "2023-01-04", {'tags': 
['tag1', 'tag2']}) + ecx.set_meta("TEST", "2023-01-05", {'tags': ['tag1', 'tag2'], 'comment': 'This is a comment'}) + + c = ec.get_calendar("TEST") + + assert c.meta() == { + pd.Timestamp("2023-01-03"): DayMeta(tags=[], comment="This is a comment"), + pd.Timestamp("2023-01-04"): DayMeta(tags=["tag1", "tag2"], comment=None), + pd.Timestamp("2023-01-05"): DayMeta(tags=["tag1", "tag2"], comment="This is a comment") + } + + +@pytest.mark.isolated +def test_get_meta_tz_naive(): + add_test_calendar_and_apply_extensions() + + import exchange_calendars as ec + import exchange_calendars_extensions.core as ecx + + day_1 = pd.Timestamp("2023-01-03") + day_2 = day_1 + pd.Timedelta(days=1) + day_3 = day_2 + pd.Timedelta(days=1) + + meta_1 = DayMeta(tags=[], comment="This is a comment") + meta_2 = DayMeta(tags=["tag1", "tag2"], comment=None) + meta_3 = DayMeta(tags=["tag1", "tag2"], comment="This is a comment") + + ecx.set_meta("TEST", day_1, meta_1) + ecx.set_meta("TEST", day_2, meta_2) + ecx.set_meta("TEST", day_3, meta_3) + + c = ec.get_calendar("TEST") + + # Test combinations of start and end aligned with date boundary. + + # start + assert c.meta(start=day_1 - pd.Timedelta(days=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(start=day_1) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(start=day_2) == OrderedDict([(day_2, meta_2), (day_3, meta_3)]) + assert c.meta(start=day_3) == OrderedDict([(day_3, meta_3)]) + assert c.meta(start=day_3 + pd.Timedelta(days=1)) == OrderedDict() + + # end + assert c.meta(end=day_3 + pd.Timedelta(days=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(end=day_3) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(end=day_2) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(end=day_1) == OrderedDict([(day_1, meta_1)]) + assert c.meta(end=day_1 - pd.Timedelta(days=1)) == OrderedDict() + + # start & end + assert c.meta(start=day_1, end=day_3) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(start=day_2, end=day_2) == OrderedDict([(day_2, meta_2)]) + + # Test combinations of start and end not aligned with date boundary. 
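+    # (Judging by the assertions below, a day is included whenever it overlaps the
+    # [start, end] range, so intra-day start/end values act like whole-day bounds.)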
+ + # start + assert c.meta(start=day_1 - pd.Timedelta(hours=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(start=day_1 + pd.Timedelta(hours=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(start=day_1 + pd.Timedelta(hours=23, minutes=59, seconds=59, milliseconds=999, microseconds=999, nanoseconds=999)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(start=day_1 + pd.Timedelta(days=1) - pd.Timedelta(nanoseconds=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(start=day_1 + pd.Timedelta(hours=24)) == OrderedDict([(day_2, meta_2), (day_3, meta_3)]) + assert c.meta(start=day_1 + pd.Timedelta(days=1)) == OrderedDict([(day_2, meta_2), (day_3, meta_3)]) + + # end + assert c.meta(end=day_3 + pd.Timedelta(hours=24)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(end=day_3 + pd.Timedelta(days=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(end=day_3 + pd.Timedelta(hours=1) - pd.Timedelta(nanoseconds=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(end=day_3 + pd.Timedelta(nanoseconds=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(end=day_3) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(end=day_3 - pd.Timedelta(nanoseconds=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + + +def test_get_meta_tz_aware(): + add_test_calendar_and_apply_extensions() + + import exchange_calendars as ec + import exchange_calendars_extensions.core as ecx + + day_1 = pd.Timestamp("2024-03-31") # Begin of DST 02:00 -> 03:00 + day_2 = pd.Timestamp("2024-09-29") # End of DST 03:00 -> 02:00 + + meta_1 = DayMeta(tags=["tag1", "tag2"], comment="This is a comment") + meta_2 = DayMeta(tags=["tag1", "tag2"], comment="This is a comment") + + ecx.set_meta("TEST", day_1, meta_1) + ecx.set_meta("TEST", day_2, meta_2) + + c = ec.get_calendar("TEST") + + assert c.tz == timezone("CET") + + assert c.meta() == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + + # Test combinations of timezone-aware start and end. 
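+    # (The assertions below suggest that timezone-aware bounds are interpreted in the
+    # calendar's timezone, CET here, so the equivalent UTC instants select the same days.)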
+ + # start + # day_1 only has 23 hours due to DST transition + + # start in timezone CET, same as the calendar + assert c.meta(start=day_1.tz_localize(tz='CET')) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(start=day_1.tz_localize(tz='CET') + pd.Timedelta(days=1) - pd.Timedelta(hours=1, nanoseconds=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(start=day_1.tz_localize(tz='CET') + pd.Timedelta(hours=22, minutes=59, seconds=59, milliseconds=999, microseconds=999, nanoseconds=999)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(start=day_1.tz_localize(tz='CET') + pd.Timedelta(days=1) - pd.Timedelta(hours=1)) == OrderedDict([(day_2, meta_2)]) + assert c.meta(start=day_1.tz_localize(tz='CET') + pd.Timedelta(hours=23)) == OrderedDict([(day_2, meta_2)]) + + # start in UTC + assert c.meta(start=day_1.tz_localize(tz='CET').tz_convert(tz='UTC')) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(start=day_1.tz_localize(tz='CET').tz_convert(tz='UTC') + pd.Timedelta(days=1) - pd.Timedelta(hours=1, nanoseconds=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(start=day_1.tz_localize(tz='CET').tz_convert(tz='UTC') + pd.Timedelta(hours=22, minutes=59, seconds=59, milliseconds=999, microseconds=999, nanoseconds=999)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(start=day_1.tz_localize(tz='CET').tz_convert(tz='UTC') + pd.Timedelta(days=1) - pd.Timedelta(hours=1)) == OrderedDict([(day_2, meta_2)]) + assert c.meta(start=day_1.tz_localize(tz='CET').tz_convert(tz='UTC') + pd.Timedelta(hours=23)) == OrderedDict([(day_2, meta_2)]) + + # end + # day_2 has 25 hours due to DST transition + + # end in timezone CET, same as the calendar + assert c.meta(end=day_2.tz_localize(tz='CET') + pd.Timedelta(hours=25)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(end=day_2.tz_localize(tz='CET') + pd.Timedelta(hours=24, minutes=59, seconds=59, milliseconds=999, microseconds=999, nanoseconds=999)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(end=day_2.tz_localize(tz='CET') + pd.Timedelta(days=1) + pd.Timedelta(hours=1, nanoseconds=-1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(end=day_2.tz_localize(tz='CET')) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(end=day_2.tz_localize(tz='CET') - pd.Timedelta(nanoseconds=1)) == OrderedDict([(day_1, meta_1)]) + + # end in UTC + assert c.meta(end=day_2.tz_localize(tz='CET').tz_convert(tz='UTC') + pd.Timedelta(hours=25)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(end=day_2.tz_localize(tz='CET').tz_convert(tz='UTC') + pd.Timedelta(hours=24, minutes=59, seconds=59, milliseconds=999, microseconds=999, nanoseconds=999)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(end=day_2.tz_localize(tz='CET').tz_convert(tz='UTC') + pd.Timedelta(days=1) + pd.Timedelta(hours=1, nanoseconds=-1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(end=day_2.tz_localize(tz='CET').tz_convert(tz='UTC')) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(end=day_2.tz_localize(tz='CET').tz_convert(tz='UTC') - pd.Timedelta(nanoseconds=1)) == OrderedDict([(day_1, meta_1)]) From fc7e65f7a9b997d618000e602ad86c6329560e9d Mon Sep 17 00:00:00 2001 From: Jens Keiner Date: Tue, 7 May 2024 13:08:35 +0200 Subject: [PATCH 13/21] Update locked dependencies. 
--- poetry.lock | 414 ++++++++++++++++++++++++++-------------------------- 1 file changed, 208 insertions(+), 206 deletions(-) diff --git a/poetry.lock b/poetry.lock index 36d9574..98ba1b1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "annotated-types" @@ -11,9 +11,6 @@ files = [ {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} - [[package]] name = "cfgv" version = "3.4.0" @@ -38,63 +35,63 @@ files = [ [[package]] name = "coverage" -version = "7.4.1" +version = "7.5.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, - {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, - {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, - {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, - {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, - {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, - {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, - {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, - {file = 
"coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, - {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, - {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, - {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, - {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, - {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, + {file = "coverage-7.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0884920835a033b78d1c73b6d3bbcda8161a900f38a488829a83982925f6c2e"}, + {file = "coverage-7.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:39afcd3d4339329c5f58de48a52f6e4e50f6578dd6099961cf22228feb25f38f"}, + {file = "coverage-7.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b0ceee8147444347da6a66be737c9d78f3353b0681715b668b72e79203e4a"}, + {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a9ca3f2fae0088c3c71d743d85404cec8df9be818a005ea065495bedc33da35"}, + {file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd215c0c7d7aab005221608a3c2b46f58c0285a819565887ee0b718c052aa4e"}, + {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4bf0655ab60d754491004a5efd7f9cccefcc1081a74c9ef2da4735d6ee4a6223"}, + {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61c4bf1ba021817de12b813338c9be9f0ad5b1e781b9b340a6d29fc13e7c1b5e"}, + {file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db66fc317a046556a96b453a58eced5024af4582a8dbdc0c23ca4dbc0d5b3146"}, + {file = "coverage-7.5.1-cp310-cp310-win32.whl", hash = "sha256:b016ea6b959d3b9556cb401c55a37547135a587db0115635a443b2ce8f1c7228"}, + {file = 
"coverage-7.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:df4e745a81c110e7446b1cc8131bf986157770fa405fe90e15e850aaf7619bc8"}, + {file = "coverage-7.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:796a79f63eca8814ca3317a1ea443645c9ff0d18b188de470ed7ccd45ae79428"}, + {file = "coverage-7.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fc84a37bfd98db31beae3c2748811a3fa72bf2007ff7902f68746d9757f3746"}, + {file = "coverage-7.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6175d1a0559986c6ee3f7fccfc4a90ecd12ba0a383dcc2da30c2b9918d67d8a3"}, + {file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fc81d5878cd6274ce971e0a3a18a8803c3fe25457165314271cf78e3aae3aa2"}, + {file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:556cf1a7cbc8028cb60e1ff0be806be2eded2daf8129b8811c63e2b9a6c43bca"}, + {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9981706d300c18d8b220995ad22627647be11a4276721c10911e0e9fa44c83e8"}, + {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d7fed867ee50edf1a0b4a11e8e5d0895150e572af1cd6d315d557758bfa9c057"}, + {file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef48e2707fb320c8f139424a596f5b69955a85b178f15af261bab871873bb987"}, + {file = "coverage-7.5.1-cp311-cp311-win32.whl", hash = "sha256:9314d5678dcc665330df5b69c1e726a0e49b27df0461c08ca12674bcc19ef136"}, + {file = "coverage-7.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fa567e99765fe98f4e7d7394ce623e794d7cabb170f2ca2ac5a4174437e90dd"}, + {file = "coverage-7.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b6cf3764c030e5338e7f61f95bd21147963cf6aa16e09d2f74f1fa52013c1206"}, + {file = "coverage-7.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ec92012fefebee89a6b9c79bc39051a6cb3891d562b9270ab10ecfdadbc0c34"}, + {file = "coverage-7.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16db7f26000a07efcf6aea00316f6ac57e7d9a96501e990a36f40c965ec7a95d"}, + {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beccf7b8a10b09c4ae543582c1319c6df47d78fd732f854ac68d518ee1fb97fa"}, + {file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8748731ad392d736cc9ccac03c9845b13bb07d020a33423fa5b3a36521ac6e4e"}, + {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7352b9161b33fd0b643ccd1f21f3a3908daaddf414f1c6cb9d3a2fd618bf2572"}, + {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7a588d39e0925f6a2bff87154752481273cdb1736270642aeb3635cb9b4cad07"}, + {file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:68f962d9b72ce69ea8621f57551b2fa9c70509af757ee3b8105d4f51b92b41a7"}, + {file = "coverage-7.5.1-cp312-cp312-win32.whl", hash = "sha256:f152cbf5b88aaeb836127d920dd0f5e7edff5a66f10c079157306c4343d86c19"}, + {file = "coverage-7.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:5a5740d1fb60ddf268a3811bcd353de34eb56dc24e8f52a7f05ee513b2d4f596"}, + {file = "coverage-7.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e2213def81a50519d7cc56ed643c9e93e0247f5bbe0d1247d15fa520814a7cd7"}, + {file = "coverage-7.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:5037f8fcc2a95b1f0e80585bd9d1ec31068a9bcb157d9750a172836e98bc7a90"}, + {file = "coverage-7.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3721c2c9e4c4953a41a26c14f4cef64330392a6d2d675c8b1db3b645e31f0e"}, + {file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca498687ca46a62ae590253fba634a1fe9836bc56f626852fb2720f334c9e4e5"}, + {file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cdcbc320b14c3e5877ee79e649677cb7d89ef588852e9583e6b24c2e5072661"}, + {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:57e0204b5b745594e5bc14b9b50006da722827f0b8c776949f1135677e88d0b8"}, + {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fe7502616b67b234482c3ce276ff26f39ffe88adca2acf0261df4b8454668b4"}, + {file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9e78295f4144f9dacfed4f92935fbe1780021247c2fabf73a819b17f0ccfff8d"}, + {file = "coverage-7.5.1-cp38-cp38-win32.whl", hash = "sha256:1434e088b41594baa71188a17533083eabf5609e8e72f16ce8c186001e6b8c41"}, + {file = "coverage-7.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:0646599e9b139988b63704d704af8e8df7fa4cbc4a1f33df69d97f36cb0a38de"}, + {file = "coverage-7.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4cc37def103a2725bc672f84bd939a6fe4522310503207aae4d56351644682f1"}, + {file = "coverage-7.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc0b4d8bfeabd25ea75e94632f5b6e047eef8adaed0c2161ada1e922e7f7cece"}, + {file = "coverage-7.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0a0f5e06881ecedfe6f3dd2f56dcb057b6dbeb3327fd32d4b12854df36bf26"}, + {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9735317685ba6ec7e3754798c8871c2f49aa5e687cc794a0b1d284b2389d1bd5"}, + {file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d21918e9ef11edf36764b93101e2ae8cc82aa5efdc7c5a4e9c6c35a48496d601"}, + {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c3e757949f268364b96ca894b4c342b41dc6f8f8b66c37878aacef5930db61be"}, + {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:79afb6197e2f7f60c4824dd4b2d4c2ec5801ceb6ba9ce5d2c3080e5660d51a4f"}, + {file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1d0d98d95dd18fe29dc66808e1accf59f037d5716f86a501fc0256455219668"}, + {file = "coverage-7.5.1-cp39-cp39-win32.whl", hash = "sha256:1cc0fe9b0b3a8364093c53b0b4c0c2dd4bb23acbec4c9240b5f284095ccf7981"}, + {file = "coverage-7.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:dde0070c40ea8bb3641e811c1cfbf18e265d024deff6de52c5950677a8fb1e0f"}, + {file = "coverage-7.5.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:6537e7c10cc47c595828b8a8be04c72144725c383c4702703ff4e42e44577312"}, + {file = "coverage-7.5.1.tar.gz", hash = "sha256:54de9ef3a9da981f7af93eafde4ede199e0846cd819eb27c88e2b712aae9708c"}, ] [package.dependencies] @@ -116,13 +113,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - 
{file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, ] [package.extras] @@ -130,68 +127,69 @@ test = ["pytest (>=6)"] [[package]] name = "exchange-calendars" -version = "4.2.8" +version = "4.5.3" description = "Calendars for securities exchanges" optional = false -python-versions = "~=3.8" +python-versions = "~=3.9" files = [ - {file = "exchange_calendars-4.2.8-py3-none-any.whl", hash = "sha256:3695afd0608c6507ce3016dfcb68a1698220016a049b45d42b4dfa9ecf85a15c"}, - {file = "exchange_calendars-4.2.8.tar.gz", hash = "sha256:1598b6219a58e7be218c640f389375e39c9c12513c7db82d7591ae56f64467f9"}, + {file = "exchange_calendars-4.5.3-py3-none-any.whl", hash = "sha256:f07b8ec6056adc27813fb864d7bcbff9235fa62e9edc5c306e7c1f7e3d32d748"}, + {file = "exchange_calendars-4.5.3.tar.gz", hash = "sha256:d4f950cfe62812fc53462379dc88e0b670128d32852d40503edf5320d3097e85"}, ] [package.dependencies] korean-lunar-calendar = "*" -numpy = "*" -pandas = ">=1.1" +numpy = "<2" +pandas = ">=1.5" pyluach = "*" -python-dateutil = "*" -pytz = "*" toolz = "*" +tzdata = "*" [package.extras] dev = ["flake8", "hypothesis", "pip-tools", "pytest", "pytest-benchmark", "pytest-xdist"] [[package]] name = "exchange-calendars-extensions-api" -version = "0.2.0" +version = "0" description = "A package that defines parts of the API of the exchange-calendars-extensions package." optional = false -python-versions = ">=3.8,<4.0" -files = [ - {file = "exchange_calendars_extensions_api-0.2.0-py3-none-any.whl", hash = "sha256:acaed8a1bc76fefc5ba29ed53b6f965230bb15b0d7677f361e8252028305a413"}, - {file = "exchange_calendars_extensions_api-0.2.0.tar.gz", hash = "sha256:e8de5a84e9d5821f1b8d74584eea931354b086089cde52df78e03fe9fd8c3b2f"}, -] +python-versions = "~=3.9" +files = [] +develop = true [package.dependencies] -pandas = ">=1" +pandas = "^2" pydantic = ">=2,<3" typing-extensions = ">=4.0,<5" +[package.source] +type = "directory" +url = "../exchange_calendars_extensions_api" + [[package]] name = "filelock" -version = "3.13.1" +version = "3.14.0" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, + {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, + {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] name = "identify" -version = "2.5.33" +version = "2.5.36" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, - {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, + {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, + {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, ] [package.extras] @@ -235,133 +233,148 @@ setuptools = "*" [[package]] name = "numpy" -version = "1.24.4" +version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.8" -files = [ - {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, - {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, - {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, - {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, - {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, - {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, - {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, - {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, - {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, - {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, - {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", 
hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pandas" -version = "2.0.3" +version = "2.2.2" description = "Powerful data structures for data analysis, time series, and statistics" optional = false -python-versions = ">=3.8" -files = [ - {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, - {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, - {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, - {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, - {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, - {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, - {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, - {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, - {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, - {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, - {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = 
"pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, ] [package.dependencies] numpy = [ - {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" -tzdata = ">=2022.1" +tzdata = ">=2022.7" [package.extras] -all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs 
(>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] -aws = ["s3fs (>=2021.08.0)"] -clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] -compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] -computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2021.07.0)"] -gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] -hdf5 = ["tables (>=3.6.1)"] -html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] -mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] -spss = ["pyreadstat (>=1.1.2)"] -sql-other = ["SQLAlchemy (>=1.4.16)"] -test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.6.3)"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", 
"psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.1" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.2.1-py3-none-any.whl", hash = "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"}, + {file = "platformdirs-4.2.1.tar.gz", hash = "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "pluggy" @@ -580,13 +593,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -594,13 +607,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2023.4" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.4-py2.py3-none-any.whl", hash = "sha256:f90ef520d95e7c46951105338d918664ebfd6f1d995bd7d153127ce90efafa6a"}, - {file = "pytz-2023.4.tar.gz", hash = "sha256:31d4583c4ed539cd037956140d695e42c033a19e984bfce9964a3f7d59bc2b40"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -615,7 +628,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -623,16 +635,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -649,7 +653,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -657,7 +660,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -665,19 +667,19 @@ files = [ [[package]] name = "setuptools" -version = "69.0.3" +version = "69.5.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", 
"sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -725,24 +727,24 @@ files = [ [[package]] name = "tzdata" -version = "2023.4" +version = "2024.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, - {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] name = "virtualenv" -version = "20.25.0" +version = "20.26.1" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, - {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, + {file = "virtualenv-20.26.1-py3-none-any.whl", hash = "sha256:7aa9982a728ae5892558bff6a2839c00b9ed145523ece2274fad6f414690ae75"}, + {file = "virtualenv-20.26.1.tar.gz", hash = "sha256:604bfdceaeece392802e6ae48e69cec49168b9c5f4a44e483963f9242eb0e78b"}, ] [package.dependencies] @@ -751,10 +753,10 @@ filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [metadata] lock-version = "2.0" -python-versions = "~=3.8" -content-hash = "309a36a89b14b9a01b53be3abc2050186122152488fa49b1c4296d0593949f3b" +python-versions = "~=3.9" +content-hash = "65c0b16d21dca55c37b7734491320c8f25aab38b6716bf0878e2175222fb0b20" From d136f7822d65b1fc9114b905cf0cdfd08e83400e Mon Sep 17 00:00:00 2001 From: Jens Keiner Date: Tue, 7 May 2024 13:09:46 +0200 Subject: [PATCH 14/21] Update pre-commit hooks. 
--- .pre-commit-config.yaml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 721e1cd..f1a6bbc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,14 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.0.275 + rev: v0.3.2 hooks: + # Run the linter. - id: ruff + # Run the formatter. + - id: ruff-format + - repo: https://github.com/asottile/pyupgrade + rev: v3.15.2 + hooks: + - id: pyupgrade + args: ["--py39-plus"] From 5112c3d3898d67bbdaafe8852d873c64cfe9694e Mon Sep 17 00:00:00 2001 From: Jens Keiner Date: Tue, 7 May 2024 13:14:22 +0200 Subject: [PATCH 15/21] Apply latest pre-commit hooks. --- .../core/__init__.py | 131 +- exchange_calendars_extensions/core/holiday.py | 72 +- .../core/holiday_calendar.py | 575 +++- exchange_calendars_extensions/core/offset.py | 51 +- exchange_calendars_extensions/core/util.py | 31 +- pyproject.toml | 27 +- tests/conftest.py | 1 + tests/test_api.py | 2993 ++++++++++++----- tests/test_holiday.py | 174 +- tests/test_holiday_calendar.py | 987 ++++-- tests/test_offset.py | 187 +- tests/test_util.py | 345 +- tests/util.py | 14 +- 13 files changed, 3944 insertions(+), 1644 deletions(-) diff --git a/exchange_calendars_extensions/core/__init__.py b/exchange_calendars_extensions/core/__init__.py index 7440ada..33d5dab 100644 --- a/exchange_calendars_extensions/core/__init__.py +++ b/exchange_calendars_extensions/core/__init__.py @@ -1,7 +1,12 @@ import functools -from typing import Callable, Type, Union, Dict - -from exchange_calendars import calendar_utils, register_calendar_type, ExchangeCalendar, get_calendar_names +from typing import Callable, Union + +from exchange_calendars import ( + calendar_utils, + register_calendar_type, + ExchangeCalendar, + get_calendar_names, +) from exchange_calendars.calendar_utils import _default_calendar_factories from exchange_calendars.exchange_calendar_asex import ASEXExchangeCalendar from exchange_calendars.exchange_calendar_xams import XAMSExchangeCalendar @@ -30,29 +35,40 @@ from pydantic import validate_call, BaseModel, conint from typing_extensions import ParamSpec, Concatenate -from exchange_calendars_extensions.api.changes import (ChangeSet, ChangeSetDict, DayType, DateLike, DayPropsLike, - Tags, TimeLike, DayMeta) -from exchange_calendars_extensions.core.holiday_calendar import (extend_class, ExtendedExchangeCalendar, - ExchangeCalendarExtensions) +from exchange_calendars_extensions.api.changes import ( + ChangeSet, + ChangeSetDict, + DayType, + DateLike, + DayPropsLike, + Tags, + TimeLike, + DayMeta, +) +from exchange_calendars_extensions.core.holiday_calendar import ( + extend_class, + ExtendedExchangeCalendar, + ExchangeCalendarExtensions, +) # Dictionary that maps from exchange key to ExchangeCalendarChangeSet. Contains all changesets to apply when creating a # new calendar instance. This dictionary should only ever contain non-empty changesets. If a changeset becomes empty, # the corresponding entry should just be removed. -_changesets: Dict[str, ChangeSet] = dict() +_changesets: dict[str, ChangeSet] = dict() class ExtensionSpec(BaseModel, arbitrary_types_allowed=True): """Specifies how to derive an extended calendar class from a vanilla calendar class.""" # The base class to extend. - base: Type[ExchangeCalendar] + base: type[ExchangeCalendar] # The day of the week on which options expire. If None, expiry days are not supported. 
- day_of_week_expiry: Union[conint(ge=0,le=6), None] = None + day_of_week_expiry: Union[conint(ge=0, le=6), None] = None # Internal dictionary that specifies how to derive extended calendars for specific exchanges. -_extensions: Dict[str, ExtensionSpec] = { +_extensions: dict[str, ExtensionSpec] = { "ASEX": ExtensionSpec(base=ASEXExchangeCalendar, day_of_week_expiry=4), "XAMS": ExtensionSpec(base=XAMSExchangeCalendar, day_of_week_expiry=4), "XBRU": ExtensionSpec(base=XBRUExchangeCalendar, day_of_week_expiry=4), @@ -112,6 +128,7 @@ def get_changeset_fn(name: str) -> Callable[[], ChangeSet]: Callable[[], ChangeSet] The function that returns the changeset. """ + def fn() -> ChangeSet: return _changesets.get(name) @@ -127,7 +144,9 @@ def fn() -> ChangeSet: _original_classes[k] = cls # Create extended class without support for expiry days. - cls = extend_class(cls, day_of_week_expiry=None, changeset_provider=get_changeset_fn(k)) + cls = extend_class( + cls, day_of_week_expiry=None, changeset_provider=get_changeset_fn(k) + ) # Register extended class. register_calendar_type(k, cls, force=True) @@ -144,7 +163,11 @@ def fn() -> ChangeSet: _original_classes[k] = cls # Create extended class with support for expiry days. - cls = extend_class(cls, day_of_week_expiry=day_of_week_expiry, changeset_provider=get_changeset_fn(k)) + cls = extend_class( + cls, + day_of_week_expiry=day_of_week_expiry, + changeset_provider=get_changeset_fn(k), + ) # Register extended class. register_calendar_type(k, cls, force=True) @@ -174,7 +197,9 @@ def remove_extensions() -> None: _original_classes.clear() -def register_extension(name: str, cls: Type[ExchangeCalendar], day_of_week_expiry: Union[int, None] = None) -> None: +def register_extension( + name: str, cls: type[ExchangeCalendar], day_of_week_expiry: Union[int, None] = None +) -> None: """ Register an extended calendar class for a given exchange key and a given base class. @@ -208,10 +233,12 @@ def _remove_calendar_from_factory_cache(name: str): calendar_utils.global_calendar_dispatcher._factory_output_cache.pop(name, None) -P = ParamSpec('P') +P = ParamSpec("P") -def _with_changeset(f: Callable[Concatenate[ChangeSet, P], ChangeSet]) -> Callable[Concatenate[str, P], None]: +def _with_changeset( + f: Callable[Concatenate[ChangeSet, P], ChangeSet], +) -> Callable[Concatenate[str, P], None]: """ An annotation that obtains the changeset from _changesets that corresponds to the exchange key passed as the first positional argument to the wrapped function. Instead of passing the key, passes the retrieved changeset, or a newly @@ -235,6 +262,7 @@ def _with_changeset(f: Callable[Concatenate[ChangeSet, P], ChangeSet]) -> Callab Callable The wrapped function. """ + @functools.wraps(f) def wrapper(exchange: str, *args: P.args, **kwargs: P.kwargs) -> None: # Retrieve changeset for key, create new empty one, if required. @@ -286,7 +314,7 @@ def _add_day(cs: ChangeSet, date: DateLike, props: DayPropsLike) -> ChangeSet: return cs.add_day(date, props) -@validate_call(config={'arbitrary_types_allowed': True}) +@validate_call(config={"arbitrary_types_allowed": True}) def add_day(exchange: str, date: DateLike, props: DayPropsLike) -> None: """ Add a day of a given type to the given exchange calendar. 
@@ -332,7 +360,7 @@ def _remove_day(cs: ChangeSet, date: DateLike) -> ChangeSet: return cs.remove_day(date) -@validate_call(config={'arbitrary_types_allowed': True}) +@validate_call(config={"arbitrary_types_allowed": True}) def remove_day(exchange: str, date: DateLike) -> None: """ Remove a day of a given type from the given exchange calendar. @@ -373,7 +401,7 @@ def _set_tags(cs: ChangeSet, date: DateLike, tags: Tags) -> ChangeSet: return cs.set_tags(date, tags) -@validate_call(config={'arbitrary_types_allowed': True}) +@validate_call(config={"arbitrary_types_allowed": True}) def set_tags(exchange: str, date: DateLike, tags: Tags) -> None: """ Set tags for a given day in the given exchange calendar. @@ -416,7 +444,7 @@ def _set_comment(cs: ChangeSet, date: DateLike, comment: Union[str, None]) -> Ch return cs.set_comment(date, comment) -@validate_call(config={'arbitrary_types_allowed': True}) +@validate_call(config={"arbitrary_types_allowed": True}) def set_comment(exchange: str, date: DateLike, comment: Union[str, None]) -> None: """ Set tags for a given day in the given exchange calendar. @@ -459,7 +487,7 @@ def _set_meta(cs: ChangeSet, date: DateLike, meta: Union[DayMeta, None]) -> Chan return cs.set_meta(date, meta) -@validate_call(config={'arbitrary_types_allowed': True}) +@validate_call(config={"arbitrary_types_allowed": True}) def set_meta(exchange: str, date: DateLike, meta: Union[DayMeta, None]) -> None: """ Set metadata for a given day in the given exchange calendar. @@ -502,7 +530,7 @@ def _reset_day(cs: ChangeSet, date: DateLike, include_tags: bool) -> ChangeSet: return cs.clear_day(date, include_meta=include_tags) -@validate_call(config={'arbitrary_types_allowed': True}) +@validate_call(config={"arbitrary_types_allowed": True}) def reset_day(exchange: str, date: DateLike, include_tags: bool = False) -> None: """ Clear a day of a given type from the given exchange calendar. @@ -545,10 +573,12 @@ def add_holiday(exchange: str, date: DateLike, name: str = "Holiday") -> None: ValidationError If strict is True and the changeset for the exchange would be inconsistent after adding the day. """ - _add_day(exchange, date, {'type': DayType.HOLIDAY, 'name': name}) + _add_day(exchange, date, {"type": DayType.HOLIDAY, "name": name}) -def add_special_open(exchange: str, date: DateLike, time: TimeLike, name: str = "Special Open") -> None: +def add_special_open( + exchange: str, date: DateLike, time: TimeLike, name: str = "Special Open" +) -> None: """ Add a special open to an exchange calendar. @@ -572,10 +602,12 @@ def add_special_open(exchange: str, date: DateLike, time: TimeLike, name: str = ValidationError If strict is True and the changeset for the exchange would be inconsistent after adding the day. """ - _add_day(exchange, date, {'type': DayType.SPECIAL_OPEN, 'name': name, 'time': time}) + _add_day(exchange, date, {"type": DayType.SPECIAL_OPEN, "name": name, "time": time}) -def add_special_close(exchange: str, date: DateLike, time: TimeLike, name: str = "Special Close") -> None: +def add_special_close( + exchange: str, date: DateLike, time: TimeLike, name: str = "Special Close" +) -> None: """ Add a special close to an exchange calendar. @@ -599,10 +631,14 @@ def add_special_close(exchange: str, date: DateLike, time: TimeLike, name: str = ValidationError If strict is True and the changeset for the exchange would be inconsistent after adding the day. 
""" - _add_day(exchange, date, {'type': DayType.SPECIAL_CLOSE, 'name': name, 'time': time}) + _add_day( + exchange, date, {"type": DayType.SPECIAL_CLOSE, "name": name, "time": time} + ) -def add_quarterly_expiry(exchange: str, date: DateLike, name: str = "Quarterly Expiry") -> None: +def add_quarterly_expiry( + exchange: str, date: DateLike, name: str = "Quarterly Expiry" +) -> None: """ Add a quarterly expiry to an exchange calendar. @@ -624,10 +660,12 @@ def add_quarterly_expiry(exchange: str, date: DateLike, name: str = "Quarterly E ValidationError If strict is True and the changeset for the exchange would be inconsistent after adding the day. """ - _add_day(exchange, date, {'type': DayType.QUARTERLY_EXPIRY, 'name': name}) + _add_day(exchange, date, {"type": DayType.QUARTERLY_EXPIRY, "name": name}) -def add_monthly_expiry(exchange: str, date: DateLike, name: str = "Monthly Expiry") -> None: +def add_monthly_expiry( + exchange: str, date: DateLike, name: str = "Monthly Expiry" +) -> None: """ Add a monthly expiry to an exchange calendar. @@ -649,7 +687,7 @@ def add_monthly_expiry(exchange: str, date: DateLike, name: str = "Monthly Expir ValidationError If strict is True and the changeset for the exchange would be inconsistent after adding the day. """ - _add_day(exchange, date, {'type': DayType.MONTHLY_EXPIRY, 'name': name}) + _add_day(exchange, date, {"type": DayType.MONTHLY_EXPIRY, "name": name}) @_with_changeset @@ -757,16 +795,37 @@ def get_changes_for_all_calendars() -> ChangeSetDict: # Declare public names. -__all__ = ["apply_extensions", "remove_extensions", "register_extension", "extend_class", "DayType", "add_day", - "remove_day", "reset_day", "DayPropsLike", "add_holiday", "add_special_close", "add_special_open", - "add_quarterly_expiry", "add_monthly_expiry", "set_meta", "reset_calendar", "reset_all_calendars", - "update_calendar", "get_changes_for_calendar", "get_changes_for_all_calendars", "ChangeSet", - "ExtendedExchangeCalendar", "ExchangeCalendarExtensions"] +__all__ = [ + "apply_extensions", + "remove_extensions", + "register_extension", + "extend_class", + "DayType", + "add_day", + "remove_day", + "reset_day", + "DayPropsLike", + "add_holiday", + "add_special_close", + "add_special_open", + "add_quarterly_expiry", + "add_monthly_expiry", + "set_meta", + "reset_calendar", + "reset_all_calendars", + "update_calendar", + "get_changes_for_calendar", + "get_changes_for_all_calendars", + "ChangeSet", + "ExtendedExchangeCalendar", + "ExchangeCalendarExtensions", +] __version__ = None try: from importlib.metadata import version + # get version from installed package __version__ = version("exchange_calendars_extensions") del version diff --git a/exchange_calendars_extensions/core/holiday.py b/exchange_calendars_extensions/core/holiday.py index 337e405..279e7fa 100644 --- a/exchange_calendars_extensions/core/holiday.py +++ b/exchange_calendars_extensions/core/holiday.py @@ -5,18 +5,21 @@ from exchange_calendars.pandas_extensions.holiday import Holiday from pandas import Series, DatetimeIndex -from exchange_calendars_extensions.core.offset import LastDayOfMonthOffsetClasses, \ - ThirdDayOfWeekInMonthOffsetClasses +from exchange_calendars_extensions.core.offset import ( + LastDayOfMonthOffsetClasses, + ThirdDayOfWeekInMonthOffsetClasses, +) def get_monthly_expiry_holiday( - name: str, - day_of_week: int, - month: int, - observance: Union[Callable[[pd.Timestamp], pd.Timestamp], None] = None, - start_date: Union[pd.Timestamp, None] = None, - end_date: Union[pd.Timestamp, None] = 
None, - tz: Union[tzinfo, None] = None) -> Holiday: + name: str, + day_of_week: int, + month: int, + observance: Union[Callable[[pd.Timestamp], pd.Timestamp], None] = None, + start_date: Union[pd.Timestamp, None] = None, + end_date: Union[pd.Timestamp, None] = None, + tz: Union[tzinfo, None] = None, +) -> Holiday: """ Return a holiday that occurs yearly on the third given day of the week in the given month of the year. @@ -45,18 +48,26 @@ def get_monthly_expiry_holiday( Holiday A new Holiday object as specified. """ - return Holiday(name, month=1, day=1, - offset=ThirdDayOfWeekInMonthOffsetClasses[day_of_week][month](), - observance=observance, start_date=start_date, end_date=end_date, tz=tz) + return Holiday( + name, + month=1, + day=1, + offset=ThirdDayOfWeekInMonthOffsetClasses[day_of_week][month](), + observance=observance, + start_date=start_date, + end_date=end_date, + tz=tz, + ) def get_last_day_of_month_holiday( - name: str, - month: int, - observance: Union[Callable[[pd.Timestamp], pd.Timestamp], None] = None, - start_date: Union[pd.Timestamp, None] = None, - end_date: Union[pd.Timestamp, None] = None, - tz: Union[tzinfo, None] = None) -> Holiday: + name: str, + month: int, + observance: Union[Callable[[pd.Timestamp], pd.Timestamp], None] = None, + start_date: Union[pd.Timestamp, None] = None, + end_date: Union[pd.Timestamp, None] = None, + tz: Union[tzinfo, None] = None, +) -> Holiday: """ Return a holiday that occurs yearly on the last day of the given month of the year. @@ -82,9 +93,16 @@ def get_last_day_of_month_holiday( Holiday A new Holiday object as specified. """ - return Holiday(name, month=1, day=1, - offset=LastDayOfMonthOffsetClasses[month](), - observance=observance, start_date=start_date, end_date=end_date, tz=tz) + return Holiday( + name, + month=1, + day=1, + offset=LastDayOfMonthOffsetClasses[month](), + observance=observance, + start_date=start_date, + end_date=end_date, + tz=tz, + ) class DayOfWeekPeriodicHoliday(Holiday): @@ -98,7 +116,7 @@ def __init__( day_of_week: int, start_date: Optional[pd.Timestamp] = None, end_date: Optional[pd.Timestamp] = None, - tz: Optional[tzinfo] = None + tz: Optional[tzinfo] = None, ) -> None: """ Constructor. @@ -131,7 +149,7 @@ def __init__( start_date=start_date, end_date=end_date, days_of_week=None, - tz=tz + tz=tz, ) # Store day of week. @@ -164,7 +182,9 @@ def _dates(self, start_date, end_date) -> pd.DatetimeIndex: return pd.DatetimeIndex([]) # Get the first date larger or equal to start_date where the day of the week is the same as day_of_week. - first = start_date + pd.Timedelta(days=(self.day_of_week - start_date.dayofweek) % 7) + first = start_date + pd.Timedelta( + days=(self.day_of_week - start_date.dayofweek) % 7 + ) if first > end_date: # Empty result. @@ -179,7 +199,9 @@ def _dates(self, start_date, end_date) -> pd.DatetimeIndex: # Return the dates. return dates - def dates(self, start_date, end_date, return_name=False) -> Union[DatetimeIndex, Series]: + def dates( + self, start_date, end_date, return_name=False + ) -> Union[DatetimeIndex, Series]: # Get DateTimeIndex with the dates of the holidays. 
dates = self._dates(start_date, end_date) diff --git a/exchange_calendars_extensions/core/holiday_calendar.py b/exchange_calendars_extensions/core/holiday_calendar.py index 119cfba..5384171 100644 --- a/exchange_calendars_extensions/core/holiday_calendar.py +++ b/exchange_calendars_extensions/core/holiday_calendar.py @@ -1,26 +1,44 @@ import datetime from abc import ABC from collections import OrderedDict +from collections.abc import Iterable from copy import copy from dataclasses import field, dataclass from functools import reduce -from typing import Iterable, Optional, Callable, Union, Type, Protocol, List, Tuple, runtime_checkable +from typing import ( + Optional, + Callable, + Union, + Protocol, + runtime_checkable, +) import pandas as pd from exchange_calendars import ExchangeCalendar -from exchange_calendars.exchange_calendar import HolidayCalendar as ExchangeCalendarsHolidayCalendar +from exchange_calendars.exchange_calendar import ( + HolidayCalendar as ExchangeCalendarsHolidayCalendar, +) from exchange_calendars.pandas_extensions.holiday import Holiday -from exchange_calendars.pandas_extensions.holiday import Holiday as ExchangeCalendarsHoliday +from exchange_calendars.pandas_extensions.holiday import ( + Holiday as ExchangeCalendarsHoliday, +) from pandas.tseries.holiday import Holiday as PandasHoliday from pydantic import validate_call -from typing_extensions import Dict -from exchange_calendars_extensions.api.changes import ChangeSet, DayType, DayMeta, TimestampLike -from exchange_calendars_extensions.core.holiday import get_monthly_expiry_holiday, DayOfWeekPeriodicHoliday, \ - get_last_day_of_month_holiday +from exchange_calendars_extensions.api.changes import ( + ChangeSet, + DayType, + DayMeta, + TimestampLike, +) +from exchange_calendars_extensions.core.holiday import ( + get_monthly_expiry_holiday, + DayOfWeekPeriodicHoliday, + get_last_day_of_month_holiday, +) # Timdelta that represents a day minus the smallest increment of time. -ONE_DAY_MINUS_EPS = pd.Timedelta(1, 'd') - pd.Timedelta(1, 'ns') +ONE_DAY_MINUS_EPS = pd.Timedelta(1, "d") - pd.Timedelta(1, "ns") class HolidayCalendar(ExchangeCalendarsHolidayCalendar): @@ -51,7 +69,11 @@ def holidays(self, start=None, end=None, return_name=False): return holidays.drop_duplicates() -def get_conflicts(holidays_dates: List[Union[pd.Timestamp, None]], other_holidays: pd.DatetimeIndex, weekend_days: Iterable[int]) -> List[int]: +def get_conflicts( + holidays_dates: list[Union[pd.Timestamp, None]], + other_holidays: pd.DatetimeIndex, + weekend_days: Iterable[int], +) -> list[int]: """ Get the indices of holidays that coincide with holidays from the other calendar or the given weekend days. @@ -71,7 +93,16 @@ def get_conflicts(holidays_dates: List[Union[pd.Timestamp, None]], other_holiday """ # Determine the indices of holidays that coincide with holidays from the other calendar. - return [i for i in range(len(holidays_dates)) if holidays_dates[i] is not None and (holidays_dates[i] in other_holidays or holidays_dates[i].weekday() in weekend_days or holidays_dates[i] in holidays_dates[i+1:])] + return [ + i + for i in range(len(holidays_dates)) + if holidays_dates[i] is not None + and ( + holidays_dates[i] in other_holidays + or holidays_dates[i].weekday() in weekend_days + or holidays_dates[i] in holidays_dates[i + 1 :] + ) + ] # A function that takes a date and returns a date or None. 
@@ -111,12 +142,16 @@ def roll_one_day_same_month(d: pd.Timestamp) -> Union[pd.Timestamp, None]: class AdjustedHolidayCalendar(ExchangeCalendarsHolidayCalendar): - - def __init__(self, rules, other: ExchangeCalendarsHolidayCalendar, weekmask: str, - roll_fn: RollFn = lambda d: d - pd.Timedelta(days=1)) -> None: + def __init__( + self, + rules, + other: ExchangeCalendarsHolidayCalendar, + weekmask: str, + roll_fn: RollFn = lambda d: d - pd.Timedelta(days=1), + ) -> None: super().__init__(rules=rules) self._other = other - self._weekend_days = {d for d in range(7) if weekmask[d] == '0'} + self._weekend_days = {d for d in range(7) if weekmask[d] == "0"} self._roll_fn = roll_fn def holidays(self, start=None, end=None, return_name=False): @@ -138,21 +173,40 @@ def holidays(self, start=None, end=None, return_name=False): for i in conflicts: holidays_dates[i] = self._roll_fn(holidays_dates[i]) - conflicts = get_conflicts(holidays_dates, other_holidays, self._weekend_days) + conflicts = get_conflicts( + holidays_dates, other_holidays, self._weekend_days + ) if len(conflicts) == 0: break if return_name: # Return a series, filter out dates that are None. - return pd.Series({d: n for d, n in zip(holidays_dates, holidays.values) if d is not None and (start is None or d >= start) and (end is None or d <= end)}) + return pd.Series( + { + d: n + for d, n in zip(holidays_dates, holidays.values) + if d is not None + and (start is None or d >= start) + and (end is None or d <= end) + } + ) else: # Return index, filter out dates that are None. - return pd.DatetimeIndex([d for d in holidays_dates if d is not None and (start is None or d >= start) and (end is None or d <= end)]) - - -def get_holiday_calendar_from_timestamps(timestamps: Iterable[pd.Timestamp], - name: Optional[str] = None) -> ExchangeCalendarsHolidayCalendar: + return pd.DatetimeIndex( + [ + d + for d in holidays_dates + if d is not None + and (start is None or d >= start) + and (end is None or d <= end) + ] + ) + + +def get_holiday_calendar_from_timestamps( + timestamps: Iterable[pd.Timestamp], name: Optional[str] = None +) -> ExchangeCalendarsHolidayCalendar: """ Return a holiday calendar with holidays given by a collection of timestamps. @@ -171,14 +225,20 @@ def get_holiday_calendar_from_timestamps(timestamps: Iterable[pd.Timestamp], A new HolidayCalendar object as specified. """ # Generate list of rules, one for each timestamp. - rules = [Holiday(name, year=ts.year, month=ts.month, day=ts.day, start_date=ts, end_date=ts) for ts in - set(dict.fromkeys(timestamps))] # As of Python 3.7, dict preserves insertion order. + rules = [ + Holiday( + name, year=ts.year, month=ts.month, day=ts.day, start_date=ts, end_date=ts + ) + for ts in set(dict.fromkeys(timestamps)) + ] # As of Python 3.7, dict preserves insertion order. # Return a new HolidayCalendar with the given rules. return ExchangeCalendarsHolidayCalendar(rules=rules) -def get_holiday_calendar_from_day_of_week(day_of_week: int, name: Optional[str] = None) -> HolidayCalendar: +def get_holiday_calendar_from_day_of_week( + day_of_week: int, name: Optional[str] = None +) -> HolidayCalendar: """ Return a holiday calendar with a periodic holiday occurring on each instance of the given day of the week. 
@@ -196,19 +256,26 @@ def get_holiday_calendar_from_day_of_week(day_of_week: int, name: Optional[str] return ExchangeCalendarsHolidayCalendar(rules=rules) -def merge_calendars(calendars: Iterable[ExchangeCalendarsHolidayCalendar]) -> ExchangeCalendarsHolidayCalendar: +def merge_calendars( + calendars: Iterable[ExchangeCalendarsHolidayCalendar], +) -> ExchangeCalendarsHolidayCalendar: """ Return a holiday calendar with all holidays from the given calendars merged into a single HolidayCalendar. The rules of the returned calendar will be the concatenation of the rules of the given calendars. Note that rules that occur earlier take precedence in case of conflicts, i.e. rules that apply to the same date. """ - x = reduce(lambda x, y: HolidayCalendar(rules=[r for r in x.rules] + [r for r in y.rules]), calendars, - ExchangeCalendarsHolidayCalendar(rules=[])) + x = reduce( + lambda x, y: HolidayCalendar(rules=[r for r in x.rules] + [r for r in y.rules]), + calendars, + ExchangeCalendarsHolidayCalendar(rules=[]), + ) return x -def get_holidays_calendar(exchange_calendar: ExchangeCalendar) -> ExchangeCalendarsHolidayCalendar: +def get_holidays_calendar( + exchange_calendar: ExchangeCalendar, +) -> ExchangeCalendarsHolidayCalendar: """ Return a holiday calendar with all holidays, regular and ad-hoc, from the given exchange calendar merged into a single calendar. @@ -223,14 +290,20 @@ def get_holidays_calendar(exchange_calendar: ExchangeCalendar) -> ExchangeCalend ExchangeCalendarsHolidayCalendar A new HolidayCalendar with all holidays from the given EchangeCalendar. """ - holiday_calendars = [get_holiday_calendar_from_timestamps(exchange_calendar.adhoc_holidays, name='ad-hoc holiday'), - exchange_calendar.regular_holidays] + holiday_calendars = [ + get_holiday_calendar_from_timestamps( + exchange_calendar.adhoc_holidays, name="ad-hoc holiday" + ), + exchange_calendar.regular_holidays, + ] # Merge all calendars by reducing the list of calendars into one, calling the merge method on each pair. return merge_calendars(holiday_calendars) -def get_special_opens_calendar(exchange_calendar: ExchangeCalendar) -> ExchangeCalendarsHolidayCalendar: +def get_special_opens_calendar( + exchange_calendar: ExchangeCalendar, +) -> ExchangeCalendarsHolidayCalendar: """ Return a holiday calendar with all special opens, regular and ad-hoc, from the given exchange calendar merged into a single calendar. @@ -250,7 +323,9 @@ def get_special_opens_calendar(exchange_calendar: ExchangeCalendar) -> ExchangeC # Add ad-hoc special opens. for item in exchange_calendar.special_opens_adhoc: _, definition = item - holiday_calendars.append(get_holiday_calendar_from_timestamps(definition, name='ad-hoc special open')) + holiday_calendars.append( + get_holiday_calendar_from_timestamps(definition, name="ad-hoc special open") + ) # Add regular special open days. for item in exchange_calendar.special_opens: @@ -258,13 +333,17 @@ def get_special_opens_calendar(exchange_calendar: ExchangeCalendar) -> ExchangeC if isinstance(definition, ExchangeCalendarsHolidayCalendar): holiday_calendars.append(definition) elif isinstance(definition, int): - holiday_calendars.append(get_holiday_calendar_from_day_of_week(definition, name='special open')) + holiday_calendars.append( + get_holiday_calendar_from_day_of_week(definition, name="special open") + ) # Merge all calendars by reducing the list of calendars into one, calling the merge method on each pair. 
return merge_calendars(holiday_calendars) -def get_special_closes_calendar(exchange_calendar: ExchangeCalendar) -> ExchangeCalendarsHolidayCalendar: +def get_special_closes_calendar( + exchange_calendar: ExchangeCalendar, +) -> ExchangeCalendarsHolidayCalendar: """ Return a holiday calendar with all special closes, regular and ad-hoc, from the given exchange calendar merged into a single calendar. @@ -284,7 +363,11 @@ def get_special_closes_calendar(exchange_calendar: ExchangeCalendar) -> Exchange # Add ad-hoc special closes. for item in exchange_calendar.special_closes_adhoc: _, definition = item - holiday_calendars.append(get_holiday_calendar_from_timestamps(definition, name='ad-hoc special close')) + holiday_calendars.append( + get_holiday_calendar_from_timestamps( + definition, name="ad-hoc special close" + ) + ) # Add regular special close days. for item in exchange_calendar.special_closes: @@ -292,13 +375,17 @@ def get_special_closes_calendar(exchange_calendar: ExchangeCalendar) -> Exchange if isinstance(definition, ExchangeCalendarsHolidayCalendar): holiday_calendars.append(definition) elif isinstance(definition, int): - holiday_calendars.append(get_holiday_calendar_from_day_of_week(definition, name='special close')) + holiday_calendars.append( + get_holiday_calendar_from_day_of_week(definition, name="special close") + ) # Merge all calendars by reducing the list of calendars into one, calling the merge method on each pair. return merge_calendars(holiday_calendars) -def get_weekend_days_calendar(exchange_calendar: ExchangeCalendar) -> ExchangeCalendarsHolidayCalendar: +def get_weekend_days_calendar( + exchange_calendar: ExchangeCalendar, +) -> ExchangeCalendarsHolidayCalendar: """ Return a holiday calendar with all weekend days from the given exchange calendar as holidays. @@ -312,13 +399,18 @@ def get_weekend_days_calendar(exchange_calendar: ExchangeCalendar) -> ExchangeCa ExchangeCalendarsHolidayCalendar A new HolidayCalendar with all weekend days from the given EchangeCalendar. """ - rules = [DayOfWeekPeriodicHoliday('weekend day', day_of_week) for day_of_week, v in - enumerate(exchange_calendar.weekmask) if v == '0'] + rules = [ + DayOfWeekPeriodicHoliday("weekend day", day_of_week) + for day_of_week, v in enumerate(exchange_calendar.weekmask) + if v == "0" + ] return ExchangeCalendarsHolidayCalendar(rules=rules) -def get_monthly_expiry_rules(day_of_week: int, - observance: Optional[Callable[[pd.Timestamp], pd.Timestamp]] = None) -> List[Holiday]: +def get_monthly_expiry_rules( + day_of_week: int, + observance: Optional[Callable[[pd.Timestamp], pd.Timestamp]] = None, +) -> list[Holiday]: """ Return a list of rules for a calendar with a holiday for each month's expiry, but excluding quarterly expiry days. @@ -334,12 +426,16 @@ def get_monthly_expiry_rules(day_of_week: int, List[Holiday] A list of rules for a calendar with a holiday for each month's expiry, but excluding quarterly expiry days. 
""" - return [get_monthly_expiry_holiday('monthly expiry', day_of_week, month, observance) for month in - [1, 2, 4, 5, 7, 8, 10, 11]] + return [ + get_monthly_expiry_holiday("monthly expiry", day_of_week, month, observance) + for month in [1, 2, 4, 5, 7, 8, 10, 11] + ] -def get_quadruple_witching_rules(day_of_week: int, - observance: Optional[Callable[[pd.Timestamp], pd.Timestamp]] = None) -> List[Holiday]: +def get_quadruple_witching_rules( + day_of_week: int, + observance: Optional[Callable[[pd.Timestamp], pd.Timestamp]] = None, +) -> list[Holiday]: """ Return a list of rules for a calendar with a holiday for each quarterly expiry aka quadruple witching. @@ -355,11 +451,16 @@ def get_quadruple_witching_rules(day_of_week: int, List[Holiday] A list of rules for a calendar with a holiday for each quarterly expiry aka quadruple witching. """ - return [get_monthly_expiry_holiday('quarterly expiry', day_of_week, month, observance) for month in [3, 6, 9, 12]] + return [ + get_monthly_expiry_holiday("quarterly expiry", day_of_week, month, observance) + for month in [3, 6, 9, 12] + ] -def get_last_day_of_month_rules(name: Optional[str] = 'last trading day of month', observance: Optional[ - Callable[[pd.Timestamp], pd.Timestamp]] = None) -> List[Holiday]: +def get_last_day_of_month_rules( + name: Optional[str] = "last trading day of month", + observance: Optional[Callable[[pd.Timestamp], pd.Timestamp]] = None, +) -> list[Holiday]: """ Return a list of rules for a calendar with a holiday for each last trading day of the month. @@ -375,7 +476,10 @@ def get_last_day_of_month_rules(name: Optional[str] = 'last trading day of month List[Holiday] A list of rules for a calendar with a holiday for each last trading day of the month. """ - return [get_last_day_of_month_holiday(name, i, observance=observance) for i in range(1, 13)] + return [ + get_last_day_of_month_holiday(name, i, observance=observance) + for i in range(1, 13) + ] @runtime_checkable @@ -457,7 +561,9 @@ def quarterly_expiries(self) -> Union[ExchangeCalendarsHolidayCalendar, None]: ... # pragma: no cover @property - def last_trading_days_of_months(self) -> Union[ExchangeCalendarsHolidayCalendar, None]: + def last_trading_days_of_months( + self, + ) -> Union[ExchangeCalendarsHolidayCalendar, None]: """ Return a holiday calendar with a holiday for each last trading day of the month. @@ -469,7 +575,9 @@ def last_trading_days_of_months(self) -> Union[ExchangeCalendarsHolidayCalendar, ... # pragma: no cover @property - def last_regular_trading_days_of_months(self) -> Union[ExchangeCalendarsHolidayCalendar, None]: + def last_regular_trading_days_of_months( + self, + ) -> Union[ExchangeCalendarsHolidayCalendar, None]: """ Return a holiday calendar with a holiday for each last regular trading day of the month. @@ -480,8 +588,11 @@ def last_regular_trading_days_of_months(self) -> Union[ExchangeCalendarsHolidayC """ ... - def meta(self, start: Union[TimestampLike, None] = None, end: Union[TimestampLike, None] = None) -> Dict[pd.Timestamp, DayMeta]: - ... + def meta( + self, + start: Union[TimestampLike, None] = None, + end: Union[TimestampLike, None] = None, + ) -> dict[pd.Timestamp, DayMeta]: ... @dataclass @@ -491,45 +602,49 @@ class AdjustedProperties: """ # The regular holidays of the exchange calendar. - regular_holidays: List[Holiday] + regular_holidays: list[Holiday] # The ad-hoc holidays of the exchange calendar. - adhoc_holidays: List[pd.Timestamp] + adhoc_holidays: list[pd.Timestamp] # The special closes of the exchange calendar. 
- special_closes: List[Tuple[datetime.time, Union[List[Holiday], int]]] + special_closes: list[tuple[datetime.time, Union[list[Holiday], int]]] # The ad-hoc special closes of the exchange calendar. - adhoc_special_closes: List[Tuple[datetime.time, pd.DatetimeIndex]] + adhoc_special_closes: list[tuple[datetime.time, pd.DatetimeIndex]] # The special opens of the exchange calendar. - special_opens: List[Tuple[datetime.time, Union[List[Holiday], int]]] + special_opens: list[tuple[datetime.time, Union[list[Holiday], int]]] # The ad-hoc special opens of the exchange calendar. - adhoc_special_opens: List[Tuple[datetime.time, pd.DatetimeIndex]] + adhoc_special_opens: list[tuple[datetime.time, pd.DatetimeIndex]] # The quarterly expiry days of the exchange calendar. - quarterly_expiries: List[Holiday] = field(default_factory=list) + quarterly_expiries: list[Holiday] = field(default_factory=list) # The monthly expiry days of the exchange calendar. - monthly_expiries: List[Holiday] = field(default_factory=list) + monthly_expiries: list[Holiday] = field(default_factory=list) # The last trading days of the month of the exchange calendar. - last_trading_days_of_months: List[Holiday] = field(default_factory=list) + last_trading_days_of_months: list[Holiday] = field(default_factory=list) # The last regular trading days of the month of the exchange calendar. - last_regular_trading_days_of_months: List[Holiday] = field(default_factory=list) + last_regular_trading_days_of_months: list[Holiday] = field(default_factory=list) class ExtendedExchangeCalendar(ExchangeCalendar, ExchangeCalendarExtensions, ABC): """ Abstract base class for exchange calendars with extended functionality. """ + ... -def extend_class(cls: Type[ExchangeCalendar], day_of_week_expiry: Union[int, None] = None, - changeset_provider: Union[Callable[[], ChangeSet], None] = None) -> type: +def extend_class( + cls: type[ExchangeCalendar], + day_of_week_expiry: Union[int, None] = None, + changeset_provider: Union[Callable[[], ChangeSet], None] = None, +) -> type: """ Extend the given ExchangeCalendar class with additional properties. @@ -610,9 +725,13 @@ def is_holiday(holiday: Holiday, ts: pd.Timestamp) -> bool: """ return any([d == ts for d in holiday.dates(start_date=ts, end_date=ts)]) - def clone_holiday(holiday: Union[PandasHoliday, ExchangeCalendarsHoliday, DayOfWeekPeriodicHoliday], - start_date: Optional[pd.Timestamp] = None, end_date: Optional[pd.Timestamp] = None) -> Union[ - PandasHoliday, ExchangeCalendarsHoliday, DayOfWeekPeriodicHoliday]: + def clone_holiday( + holiday: Union[ + PandasHoliday, ExchangeCalendarsHoliday, DayOfWeekPeriodicHoliday + ], + start_date: Optional[pd.Timestamp] = None, + end_date: Optional[pd.Timestamp] = None, + ) -> Union[PandasHoliday, ExchangeCalendarsHoliday, DayOfWeekPeriodicHoliday]: """ Return a copy of the given holiday. @@ -631,25 +750,48 @@ def clone_holiday(holiday: Union[PandasHoliday, ExchangeCalendarsHoliday, DayOfW The copy of the given holiday. """ # Determine the effective start and end dates. 
- start_date_effective = start_date if start_date is not None else holiday.start_date + start_date_effective = ( + start_date if start_date is not None else holiday.start_date + ) end_date_effective = end_date if end_date is not None else holiday.end_date if isinstance(holiday, DayOfWeekPeriodicHoliday): - return DayOfWeekPeriodicHoliday(name=holiday.name, day_of_week=holiday.day_of_week, - start_date=start_date_effective, end_date=end_date_effective, tz=holiday.tz) + return DayOfWeekPeriodicHoliday( + name=holiday.name, + day_of_week=holiday.day_of_week, + start_date=start_date_effective, + end_date=end_date_effective, + tz=holiday.tz, + ) elif isinstance(holiday, ExchangeCalendarsHoliday): - return ExchangeCalendarsHoliday(name=holiday.name, year=holiday.year, month=holiday.month, day=holiday.day, - offset=holiday.offset, observance=holiday.observance, - start_date=start_date_effective, end_date=end_date_effective, - days_of_week=holiday.days_of_week, tz=holiday.tz) + return ExchangeCalendarsHoliday( + name=holiday.name, + year=holiday.year, + month=holiday.month, + day=holiday.day, + offset=holiday.offset, + observance=holiday.observance, + start_date=start_date_effective, + end_date=end_date_effective, + days_of_week=holiday.days_of_week, + tz=holiday.tz, + ) elif isinstance(holiday, PandasHoliday): - return PandasHoliday(name=holiday.name, year=holiday.year, month=holiday.month, day=holiday.day, - offset=holiday.offset, observance=holiday.observance, start_date=start_date_effective, - end_date=end_date_effective, days_of_week=holiday.days_of_week) + return PandasHoliday( + name=holiday.name, + year=holiday.year, + month=holiday.month, + day=holiday.day, + offset=holiday.offset, + observance=holiday.observance, + start_date=start_date_effective, + end_date=end_date_effective, + days_of_week=holiday.days_of_week, + ) else: raise NotImplementedError(f"Unsupported holiday type: {type(holiday)}") - def remove_day_from_rules(ts: pd.Timestamp, rules: List[Holiday]) -> List[Holiday]: + def remove_day_from_rules(ts: pd.Timestamp, rules: list[Holiday]) -> list[Holiday]: """ Parameters ---------- @@ -682,8 +824,12 @@ def remove_day_from_rules(ts: pd.Timestamp, rules: List[Holiday]) -> List[Holida return rules - def add_special_session(name: str, ts: pd.Timestamp, t: datetime.time, special_sessions: List[Tuple[ - datetime.time, List[Holiday]]]) -> List[Tuple[datetime.time, List[Holiday]]]: + def add_special_session( + name: str, + ts: pd.Timestamp, + t: datetime.time, + special_sessions: list[tuple[datetime.time, list[Holiday]]], + ) -> list[tuple[datetime.time, list[Holiday]]]: """ Add a special session to the given list of special sessions. @@ -726,8 +872,11 @@ def add_special_session(name: str, ts: pd.Timestamp, t: datetime.time, special_s return special_sessions - def remove_holiday(ts: pd.Timestamp, regular_holidays_rules: List[Holiday], - adhoc_holidays: List[pd.Timestamp] = []) -> Tuple[List[Holiday], List[pd.Timestamp]]: + def remove_holiday( + ts: pd.Timestamp, + regular_holidays_rules: list[Holiday], + adhoc_holidays: list[pd.Timestamp] = [], + ) -> tuple[list[Holiday], list[pd.Timestamp]]: """ Remove any holidays that coincide with ts. 
@@ -750,9 +899,14 @@ def remove_holiday(ts: pd.Timestamp, regular_holidays_rules: List[Holiday], adhoc_holidays = [adhoc_ts for adhoc_ts in adhoc_holidays if adhoc_ts != ts] return regular_holidays_rules, adhoc_holidays - def remove_special_session(ts: pd.Timestamp, regular_special_sessions: List[Tuple[datetime.time, List[Holiday]]], - adhoc_special_sessions: List[Tuple[datetime.time, pd.DatetimeIndex]]) -> Tuple[ - List[Tuple[datetime.time, List[Holiday]]], List[Tuple[datetime.time, pd.DatetimeIndex]]]: + def remove_special_session( + ts: pd.Timestamp, + regular_special_sessions: list[tuple[datetime.time, list[Holiday]]], + adhoc_special_sessions: list[tuple[datetime.time, pd.DatetimeIndex]], + ) -> tuple[ + list[tuple[datetime.time, list[Holiday]]], + list[tuple[datetime.time, pd.DatetimeIndex]], + ]: """ Remove any special sessions that coincide with ts. @@ -776,32 +930,48 @@ def remove_special_session(ts: pd.Timestamp, regular_special_sessions: List[Tupl # Check if the day of week corresponding to ts is the same as rules. if ts.dayofweek == rules: raise NotImplementedError( - "Removing a special session date that corresponds to a day of week rule is not supported.") + "Removing a special session date that corresponds to a day of week rule is not supported." + ) else: # List of rules. _ = remove_day_from_rules(ts, rules) # Remove any ad-hoc special sessions that coincide with ts. - adhoc_special_sessions = [(t, adhoc_ts.drop(ts, errors='ignore')) for t, adhoc_ts in adhoc_special_sessions] + adhoc_special_sessions = [ + (t, adhoc_ts.drop(ts, errors="ignore")) + for t, adhoc_ts in adhoc_special_sessions + ] # Remove empty DateTime indices. - adhoc_special_sessions = [(t, adhoc_ts) for t, adhoc_ts in adhoc_special_sessions if not adhoc_ts.empty] + adhoc_special_sessions = [ + (t, adhoc_ts) + for t, adhoc_ts in adhoc_special_sessions + if not adhoc_ts.empty + ] return regular_special_sessions, adhoc_special_sessions def __init__(self, *args, **kwargs): # Save adjusted properties. Initialize with copies of the original properties. - a = AdjustedProperties(regular_holidays=list(copy(regular_holidays_orig(self).rules)), - adhoc_holidays=list(copy(adhoc_holidays_orig(self))), - special_closes=[(t, d if isinstance(d, int) else list(copy(d.rules))) for t, d in - copy(special_closes_orig(self))], - adhoc_special_closes=list(copy(adhoc_special_closes_orig(self))), - special_opens=[(t, d if isinstance(d, int) else list(copy(d.rules))) for t, d in - copy(special_opens_orig(self))], - adhoc_special_opens=list(copy(adhoc_special_opens_orig(self)))) + a = AdjustedProperties( + regular_holidays=list(copy(regular_holidays_orig(self).rules)), + adhoc_holidays=list(copy(adhoc_holidays_orig(self))), + special_closes=[ + (t, d if isinstance(d, int) else list(copy(d.rules))) + for t, d in copy(special_closes_orig(self)) + ], + adhoc_special_closes=list(copy(adhoc_special_closes_orig(self))), + special_opens=[ + (t, d if isinstance(d, int) else list(copy(d.rules))) + for t, d in copy(special_opens_orig(self)) + ], + adhoc_special_opens=list(copy(adhoc_special_opens_orig(self))), + ) # Get changeset from provider, maybe. - changeset: Union[ChangeSet, None] = changeset_provider() if changeset_provider is not None else None + changeset: Union[ChangeSet, None] = ( + changeset_provider() if changeset_provider is not None else None + ) # Set changeset to None if it is empty. 
if changeset is not None and len(changeset) <= 0: @@ -810,24 +980,35 @@ def __init__(self, *args, **kwargs): if changeset is not None: # Remove all changed days from holidays, special opens, and special closes. for ts in changeset.all_days(include_meta=False): - a.regular_holidays, a.adhoc_holidays = remove_holiday(ts, a.regular_holidays, a.adhoc_holidays) - a.special_opens, a.adhoc_special_opens = remove_special_session(ts, a.special_opens, - a.adhoc_special_opens) - a.special_closes, a.adhoc_special_closes = remove_special_session(ts, a.special_closes, - a.adhoc_special_closes) + a.regular_holidays, a.adhoc_holidays = remove_holiday( + ts, a.regular_holidays, a.adhoc_holidays + ) + a.special_opens, a.adhoc_special_opens = remove_special_session( + ts, a.special_opens, a.adhoc_special_opens + ) + a.special_closes, a.adhoc_special_closes = remove_special_session( + ts, a.special_closes, a.adhoc_special_closes + ) # Add holiday, special opens, and special closes. for date, props in changeset.add.items(): if props.type == DayType.HOLIDAY: # Add the holiday. - a.regular_holidays.append(Holiday(props.name, year=date.year, month=date.month, - day=date.day)) + a.regular_holidays.append( + Holiday( + props.name, year=date.year, month=date.month, day=date.day + ) + ) elif props.type == DayType.SPECIAL_OPEN: # Add the special open. - a.special_opens = add_special_session(props.name, date, props.time, a.special_opens) + a.special_opens = add_special_session( + props.name, date, props.time, a.special_opens + ) elif props.type == DayType.SPECIAL_CLOSE: # Add the special close. - a.special_closes = add_special_session(props.name, date, props.time, a.special_closes) + a.special_closes = add_special_session( + props.name, date, props.time, a.special_closes + ) self._adjusted_properties = a @@ -839,8 +1020,16 @@ def __init__(self, *args, **kwargs): # Set up monthly and quarterly expiries. This can only be done after holidays, special opens, and special closes # have been set up. - a.monthly_expiries = get_monthly_expiry_rules(day_of_week_expiry) if day_of_week_expiry is not None else [] - a.quarterly_expiries = get_quadruple_witching_rules(day_of_week_expiry) if day_of_week_expiry is not None else [] + a.monthly_expiries = ( + get_monthly_expiry_rules(day_of_week_expiry) + if day_of_week_expiry is not None + else [] + ) + a.quarterly_expiries = ( + get_quadruple_witching_rules(day_of_week_expiry) + if day_of_week_expiry is not None + else [] + ) if changeset is not None: # Remove all changed days from monthly and quarterly expiries. @@ -852,19 +1041,29 @@ def __init__(self, *args, **kwargs): for date, props in changeset.add.items(): if props.type == DayType.MONTHLY_EXPIRY: # Add the monthly expiry. - a.monthly_expiries.append(Holiday(props.name, year=date.year, month=date.month, - day=date.day)) + a.monthly_expiries.append( + Holiday( + props.name, year=date.year, month=date.month, day=date.day + ) + ) elif props.type == DayType.QUARTERLY_EXPIRY: # Add the quarterly expiry. - a.quarterly_expiries.append(Holiday(props.name, year=date.year, month=date.month, - day=date.day)) + a.quarterly_expiries.append( + Holiday( + props.name, year=date.year, month=date.month, day=date.day + ) + ) # Set up last trading days of the month. - a.last_trading_days_of_months = get_last_day_of_month_rules('last trading day of month') + a.last_trading_days_of_months = get_last_day_of_month_rules( + "last trading day of month" + ) # Set up last regular trading days of the month. 
This can only be done after holidays, special opens, # special closes, monthly expiries, and quarterly expiries have been set up. - a.last_regular_trading_days_of_months = get_last_day_of_month_rules('last regular trading day of month') + a.last_regular_trading_days_of_months = get_last_day_of_month_rules( + "last regular trading day of month" + ) # Save a calendar with all holidays and another one with all holidays and special business days for later use. # These calendars are needed to generate calendars for monthly expiries, quarterly expiries, last trading days @@ -872,30 +1071,41 @@ def __init__(self, *args, **kwargs): # need to be rolled back to a previous business day if they fall on a holiday and/or special business day. self._holidays_shared = get_holidays_calendar(self) self._holidays_and_special_business_days_shared = merge_calendars( - [get_holidays_calendar(self), get_special_opens_calendar(self), get_special_closes_calendar(self)]) + [ + get_holidays_calendar(self), + get_special_opens_calendar(self), + get_special_closes_calendar(self), + ] + ) @property def regular_holidays(self) -> Union[HolidayCalendar, None]: return HolidayCalendar(rules=self._adjusted_properties.regular_holidays) @property - def adhoc_holidays(self) -> List[pd.Timestamp]: + def adhoc_holidays(self) -> list[pd.Timestamp]: return copy(self._adjusted_properties.adhoc_holidays) @property - def special_closes(self) -> List[Tuple[datetime.time, Union[HolidayCalendar, int]]]: - return [(t, rules if isinstance(rules, int) else HolidayCalendar(rules=rules)) for t, rules in self._adjusted_properties.special_closes] + def special_closes(self) -> list[tuple[datetime.time, Union[HolidayCalendar, int]]]: + return [ + (t, rules if isinstance(rules, int) else HolidayCalendar(rules=rules)) + for t, rules in self._adjusted_properties.special_closes + ] @property - def special_closes_adhoc(self) -> List[Tuple[datetime.time, pd.DatetimeIndex]]: + def special_closes_adhoc(self) -> list[tuple[datetime.time, pd.DatetimeIndex]]: return copy(self._adjusted_properties.adhoc_special_closes) @property - def special_opens(self) -> List[Tuple[datetime.time, Union[HolidayCalendar, int]]]: - return [(t, rules if isinstance(rules, int) else HolidayCalendar(rules=rules)) for t, rules in self._adjusted_properties.special_opens] + def special_opens(self) -> list[tuple[datetime.time, Union[HolidayCalendar, int]]]: + return [ + (t, rules if isinstance(rules, int) else HolidayCalendar(rules=rules)) + for t, rules in self._adjusted_properties.special_opens + ] @property - def special_opens_adhoc(self) -> List[Tuple[datetime.time, pd.DatetimeIndex]]: + def special_opens_adhoc(self) -> list[tuple[datetime.time, pd.DatetimeIndex]]: return copy(self._adjusted_properties.adhoc_special_opens) @property @@ -916,34 +1126,56 @@ def special_closes_all(self) -> Union[HolidayCalendar, None]: @property def monthly_expiries(self) -> Union[ExchangeCalendarsHolidayCalendar, None]: - return AdjustedHolidayCalendar(rules=self._adjusted_properties.monthly_expiries, - other=self._holidays_and_special_business_days_shared, weekmask=self.weekmask, - roll_fn=roll_one_day_same_month) + return AdjustedHolidayCalendar( + rules=self._adjusted_properties.monthly_expiries, + other=self._holidays_and_special_business_days_shared, + weekmask=self.weekmask, + roll_fn=roll_one_day_same_month, + ) @property def quarterly_expiries(self) -> Union[ExchangeCalendarsHolidayCalendar, None]: - return AdjustedHolidayCalendar(rules=self._adjusted_properties.quarterly_expiries, - 
other=self._holidays_and_special_business_days_shared, weekmask=self.weekmask, - roll_fn=roll_one_day_same_month) + return AdjustedHolidayCalendar( + rules=self._adjusted_properties.quarterly_expiries, + other=self._holidays_and_special_business_days_shared, + weekmask=self.weekmask, + roll_fn=roll_one_day_same_month, + ) @property - def last_trading_days_of_months(self) -> Union[ExchangeCalendarsHolidayCalendar, None]: - return AdjustedHolidayCalendar(rules=self._adjusted_properties.last_trading_days_of_months, - other=self._holidays_shared, weekmask=self.weekmask, - roll_fn=roll_one_day_same_month) + def last_trading_days_of_months( + self, + ) -> Union[ExchangeCalendarsHolidayCalendar, None]: + return AdjustedHolidayCalendar( + rules=self._adjusted_properties.last_trading_days_of_months, + other=self._holidays_shared, + weekmask=self.weekmask, + roll_fn=roll_one_day_same_month, + ) @property - def last_regular_trading_days_of_months(self) -> Union[ExchangeCalendarsHolidayCalendar, None]: - return AdjustedHolidayCalendar(rules=self._adjusted_properties.last_regular_trading_days_of_months, - other=self._holidays_and_special_business_days_shared, weekmask=self.weekmask, - roll_fn=roll_one_day_same_month) - - @validate_call(config={'arbitrary_types_allowed': True}) - def meta(self, start: Union[TimestampLike, None] = None, end: Union[TimestampLike, None] = None) -> Dict[pd.Timestamp, DayMeta]: + def last_regular_trading_days_of_months( + self, + ) -> Union[ExchangeCalendarsHolidayCalendar, None]: + return AdjustedHolidayCalendar( + rules=self._adjusted_properties.last_regular_trading_days_of_months, + other=self._holidays_and_special_business_days_shared, + weekmask=self.weekmask, + roll_fn=roll_one_day_same_month, + ) + + @validate_call(config={"arbitrary_types_allowed": True}) + def meta( + self, + start: Union[TimestampLike, None] = None, + end: Union[TimestampLike, None] = None, + ) -> dict[pd.Timestamp, DayMeta]: # Check that when start and end are both given, they are both timezone-aware or both timezone-naive. if start and end: if bool(start.tz) != bool(end.tz): - raise ValueError("start and end must both be timezone-aware or both timezone-naive.") + raise ValueError( + "start and end must both be timezone-aware or both timezone-naive." + ) if start > end: raise ValueError("start must be less than or equal to end.") @@ -959,28 +1191,49 @@ def meta(self, start: Union[TimestampLike, None] = None, end: Union[TimestampLik # to (almost) a calendar day. The same may not be true for timezone-aware timestamps when the period includes # e.g. a DST transition. if tz: - return OrderedDict([(k, v) for k, v in self._meta.items() if (start is None or (k + ONE_DAY_MINUS_EPS).tz_localize(tz=self.tz) >= start) and (end is None or (k.tz_localize(tz=self.tz)) <= end)]) + return OrderedDict( + [ + (k, v) + for k, v in self._meta.items() + if ( + start is None + or (k + ONE_DAY_MINUS_EPS).tz_localize(tz=self.tz) >= start + ) + and (end is None or (k.tz_localize(tz=self.tz)) <= end) + ] + ) else: - return OrderedDict([(k, v) for k, v in self._meta.items() if (start is None or (k + ONE_DAY_MINUS_EPS) >= start) and (end is None or k <= end)]) + return OrderedDict( + [ + (k, v) + for k, v in self._meta.items() + if (start is None or (k + ONE_DAY_MINUS_EPS) >= start) + and (end is None or k <= end) + ] + ) # Use type to create a new class. 
- extended = type(cls.__name__ + "Extended", (cls, ExtendedExchangeCalendar), { - "__init__": __init__, - "regular_holidays": regular_holidays, - "adhoc_holidays": adhoc_holidays, - "special_closes": special_closes, - "special_closes_adhoc": special_closes_adhoc, - "special_opens": special_opens, - "special_opens_adhoc": special_opens_adhoc, - "weekend_days": weekend_days, - "holidays_all": holidays_all, - "special_opens_all": special_opens_all, - "special_closes_all": special_closes_all, - "monthly_expiries": monthly_expiries, - "quarterly_expiries": quarterly_expiries, - "last_trading_days_of_months": last_trading_days_of_months, - "last_regular_trading_days_of_months": last_regular_trading_days_of_months, - 'meta': meta, - }) + extended = type( + cls.__name__ + "Extended", + (cls, ExtendedExchangeCalendar), + { + "__init__": __init__, + "regular_holidays": regular_holidays, + "adhoc_holidays": adhoc_holidays, + "special_closes": special_closes, + "special_closes_adhoc": special_closes_adhoc, + "special_opens": special_opens, + "special_opens_adhoc": special_opens_adhoc, + "weekend_days": weekend_days, + "holidays_all": holidays_all, + "special_opens_all": special_opens_all, + "special_closes_all": special_closes_all, + "monthly_expiries": monthly_expiries, + "quarterly_expiries": quarterly_expiries, + "last_trading_days_of_months": last_trading_days_of_months, + "last_regular_trading_days_of_months": last_regular_trading_days_of_months, + "meta": meta, + }, + ) return extended diff --git a/exchange_calendars_extensions/core/offset.py b/exchange_calendars_extensions/core/offset.py index a2f6cbc..faf46eb 100644 --- a/exchange_calendars_extensions/core/offset.py +++ b/exchange_calendars_extensions/core/offset.py @@ -1,17 +1,19 @@ from abc import ABC, abstractmethod from datetime import datetime, date -from typing import Type import pandas as pd from pandas._libs.tslibs import localize_pydatetime from pandas._libs.tslibs.offsets import Easter, apply_wraps -from exchange_calendars_extensions.core.util import get_month_name, get_day_of_week_name, third_day_of_week_in_month, \ - last_day_in_month +from exchange_calendars_extensions.core.util import ( + get_month_name, + get_day_of_week_name, + third_day_of_week_in_month, + last_day_in_month, +) class AbstractHolidayOffset(Easter, ABC): - @staticmethod def _is_normalized(dt): if dt.hour != 0 or dt.minute != 0 or dt.second != 0 or dt.microsecond != 0: @@ -63,11 +65,13 @@ def is_on_offset(self, dt): return date(dt.year, dt.month, dt.day) == self.holiday(dt.year).to_pydate() -def get_third_day_of_week_in_month_offset_class(day_of_week: int, month: int) -> Type[AbstractHolidayOffset]: +def get_third_day_of_week_in_month_offset_class( + day_of_week: int, month: int +) -> type[AbstractHolidayOffset]: """ Return a new class that represents an offset that, when applied to the first day of a year, results in the third given day of the week in the given month. - + For example, to get the offset for the third Friday in June, call this function with day_of_week=4 and month=6. On many exchanges, this will be the quadruple witching day for the second quarter of the year. @@ -97,9 +101,13 @@ def holiday(self, year) -> date: month_name = get_month_name(month) # Create the new class. 
- offset = type(f"MonthlyExpiry{month_name}{day_of_week_name}Offset", (AbstractHolidayOffset,), { - "holiday": holiday, - }) + offset = type( + f"MonthlyExpiry{month_name}{day_of_week_name}Offset", + (AbstractHolidayOffset,), + { + "holiday": holiday, + }, + ) # Return the new class. return offset @@ -118,10 +126,16 @@ def holiday(self, year) -> date: # December are also called quadruple witching. # # Currently, includes cases for Monday to Friday which should cover all real-world scenarios. -ThirdDayOfWeekInMonthOffsetClasses = {day_of_week: {month: get_third_day_of_week_in_month_offset_class(day_of_week, month) for month in range(1, 13)} for day_of_week in range(5)} +ThirdDayOfWeekInMonthOffsetClasses = { + day_of_week: { + month: get_third_day_of_week_in_month_offset_class(day_of_week, month) + for month in range(1, 13) + } + for day_of_week in range(5) +} -def get_last_day_of_month_offset_class(month: int) -> Type[AbstractHolidayOffset]: +def get_last_day_of_month_offset_class(month: int) -> type[AbstractHolidayOffset]: """ Return a new class that represents an offset that, when applied to the first day of a year, results in the last day of the given month. @@ -136,6 +150,7 @@ def get_last_day_of_month_offset_class(month: int) -> Type[AbstractHolidayOffset Type[AbstractHolidayOffset] A new class that represents the offset. """ + def holiday(self, year) -> date: """ Return a function that returns the last day of the month for a given year. @@ -146,9 +161,13 @@ def holiday(self, year) -> date: month_name = get_month_name(month) # Create the new class. - offset = type(f"LastDayOfMonth{month_name}Offset", (AbstractHolidayOffset,), { - "holiday": holiday, - }) + offset = type( + f"LastDayOfMonth{month_name}Offset", + (AbstractHolidayOffset,), + { + "holiday": holiday, + }, + ) # Return the new class. return offset @@ -156,4 +175,6 @@ def holiday(self, year) -> date: # A dictionary that maps month to corresponding offset class as returned by get_last_day_of_month_offset_class. Used # as an internal cache to avoid unnecessarily creating classes with the same parameters. 
-LastDayOfMonthOffsetClasses = {month: get_last_day_of_month_offset_class(month) for month in range(1, 13)} +LastDayOfMonthOffsetClasses = { + month: get_last_day_of_month_offset_class(month) for month in range(1, 13) +} diff --git a/exchange_calendars_extensions/core/util.py b/exchange_calendars_extensions/core/util.py index cf71136..4b83c56 100644 --- a/exchange_calendars_extensions/core/util.py +++ b/exchange_calendars_extensions/core/util.py @@ -19,9 +19,20 @@ def get_month_name(month: int) -> str: if month < 1 or month > 12: raise ValueError("Month must be between 1 and 12.") - month_name = \ - ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", - "December"][month - 1] + month_name = [ + "January", + "February", + "March", + "April", + "May", + "June", + "July", + "August", + "September", + "October", + "November", + "December", + ][month - 1] return month_name @@ -43,7 +54,15 @@ def get_day_of_week_name(day_of_week: int) -> str: if day_of_week < 0 or day_of_week > 6: raise ValueError("Day of week must be between 0 and 6.") - day_of_week_name = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"][day_of_week] + day_of_week_name = [ + "Monday", + "Tuesday", + "Wednesday", + "Thursday", + "Friday", + "Saturday", + "Sunday", + ][day_of_week] return day_of_week_name @@ -95,4 +114,6 @@ def last_day_in_month(month: int, year: int) -> date: datetime.date the datetime.date representing the last day in the given month. """ - return (date(year, month, 1) + timedelta(days=32)).replace(day=1) - timedelta(days=1) + return (date(year, month, 1) + timedelta(days=32)).replace(day=1) - timedelta( + days=1 + ) diff --git a/pyproject.toml b/pyproject.toml index 4f238b5..251d2ea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,14 +48,6 @@ pre-commit = ">=3.3.3,<3.6.0" addopts = "--cov=exchange_calendars_extensions --cov-report=term-missing" [tool.ruff] -# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default. -select = ["E", "F"] -ignore = ["E501"] - -# Allow autofix for all enabled rules (when `--fix`) is provided. -fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT"] -unfixable = [] - # Exclude a variety of commonly ignored directories. exclude = [ ".bzr", @@ -84,12 +76,21 @@ exclude = [ # Same as Black. line-length = 88 -# Allow unused variables when underscore-prefixed. -dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" - # Assume Python 3.9. target-version = "py39" -[tool.ruff.mccabe] + +[lint] +# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default. +select = ["E", "F"] +ignore = ["E501"] + +# Allow autofix for all enabled rules (when `--fix`) is provided. +fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT"] +unfixable = [] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + # Unlike Flake8, default to a complexity level of 10. 
-max-complexity = 10 +mccabe = 10 \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index 4d4f76c..94ca7eb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -21,6 +21,7 @@ def run_test_in_separate_process(test_function: Callable) -> Callable: A new function that, when called with some arguments, runs the given test function with those arguments in a separate process and returns the result. """ + def wrapper(*args, **kwargs): with multiprocessing.Pool(1) as pool: result = pool.apply(test_function, args, kwargs) diff --git a/tests/test_api.py b/tests/test_api.py index f6ccbbe..55a7775 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,7 +1,8 @@ import datetime from collections import OrderedDict +from collections.abc import Iterable from datetime import time -from typing import Optional, Tuple, Iterable, Union +from typing import Optional, Union import pandas as pd import pytest @@ -12,50 +13,52 @@ from exchange_calendars_extensions.api.changes import DayMeta -HOLIDAY_0 = 'Holiday 0' -SPECIAL_OPEN_0 = 'Special Open 0' -SPECIAL_CLOSE_0 = 'Special Close 0' -AD_HOC_HOLIDAY = 'ad-hoc holiday' -AD_HOC_SPECIAL_OPEN = 'ad-hoc special open' -AD_HOC_SPECIAL_CLOSE = 'ad-hoc special close' -WEEKEND_DAY = 'weekend day' -QUARTERLY_EXPIRY = 'quarterly expiry' -MONTHLY_EXPIRY = 'monthly expiry' -LAST_TRADING_DAY_OF_MONTH = 'last trading day of month' -LAST_REGULAR_TRADING_DAY_OF_MONTH = 'last regular trading day of month' -ADDED_HOLIDAY = 'Added holiday' -ADDED_SPECIAL_OPEN = 'Added Special Open' -ADDED_SPECIAL_CLOSE = 'Added Special Close' -INSERTED_HOLIDAY = 'Inserted Holiday' +HOLIDAY_0 = "Holiday 0" +SPECIAL_OPEN_0 = "Special Open 0" +SPECIAL_CLOSE_0 = "Special Close 0" +AD_HOC_HOLIDAY = "ad-hoc holiday" +AD_HOC_SPECIAL_OPEN = "ad-hoc special open" +AD_HOC_SPECIAL_CLOSE = "ad-hoc special close" +WEEKEND_DAY = "weekend day" +QUARTERLY_EXPIRY = "quarterly expiry" +MONTHLY_EXPIRY = "monthly expiry" +LAST_TRADING_DAY_OF_MONTH = "last trading day of month" +LAST_REGULAR_TRADING_DAY_OF_MONTH = "last regular trading day of month" +ADDED_HOLIDAY = "Added holiday" +ADDED_SPECIAL_OPEN = "Added Special Open" +ADDED_SPECIAL_CLOSE = "Added Special Close" +INSERTED_HOLIDAY = "Inserted Holiday" def apply_extensions(): - """ Apply the extensions to the exchange_calendars module. 
""" + """Apply the extensions to the exchange_calendars module.""" import exchange_calendars_extensions.core as ecx + ecx.apply_extensions() -def add_test_calendar_and_apply_extensions(holidays: Optional[Iterable[pd.Timestamp]] = (pd.Timestamp("2023-01-01"),), - adhoc_holidays: Optional[Iterable[pd.Timestamp]] = ( - pd.Timestamp("2023-02-01"),), - regular_special_close: Optional[time] = time(14, 00), - special_closes: Optional[ - Iterable[Tuple[datetime.time, Iterable[pd.Timestamp]]]] = ( - (time(14, 00), (pd.Timestamp("2023-03-01"),)),), - adhoc_special_closes: Optional[Iterable[ - Tuple[datetime.time, Union[pd.Timestamp, Iterable[pd.Timestamp]]]]] = ( - (time(14, 00), pd.Timestamp("2023-04-03")),), - regular_special_open: Optional[time] = time(11, 00), - special_opens: Optional[ - Iterable[Tuple[datetime.time, Iterable[pd.Timestamp]]]] = ( - (time(11, 00), (pd.Timestamp("2023-05-01"),)),), - adhoc_special_opens: Optional[Iterable[ - Tuple[datetime.time, Union[pd.Timestamp, Iterable[pd.Timestamp]]]]] = ( - (time(11, 00), pd.Timestamp("2023-06-01")),), - weekmask: Optional[str] = "1111100", - day_of_week_expiry: Optional[int] = 4): +def add_test_calendar_and_apply_extensions( + holidays: Optional[Iterable[pd.Timestamp]] = (pd.Timestamp("2023-01-01"),), + adhoc_holidays: Optional[Iterable[pd.Timestamp]] = (pd.Timestamp("2023-02-01"),), + regular_special_close: Optional[time] = time(14, 00), + special_closes: Optional[Iterable[tuple[datetime.time, Iterable[pd.Timestamp]]]] = ( + (time(14, 00), (pd.Timestamp("2023-03-01"),)), + ), + adhoc_special_closes: Optional[ + Iterable[tuple[datetime.time, Union[pd.Timestamp, Iterable[pd.Timestamp]]]] + ] = ((time(14, 00), pd.Timestamp("2023-04-03")),), + regular_special_open: Optional[time] = time(11, 00), + special_opens: Optional[Iterable[tuple[datetime.time, Iterable[pd.Timestamp]]]] = ( + (time(11, 00), (pd.Timestamp("2023-05-01"),)), + ), + adhoc_special_opens: Optional[ + Iterable[tuple[datetime.time, Union[pd.Timestamp, Iterable[pd.Timestamp]]]] + ] = ((time(11, 00), pd.Timestamp("2023-06-01")),), + weekmask: Optional[str] = "1111100", + day_of_week_expiry: Optional[int] = 4, +): def ensure_list(obj): - """ Check if an object is iterable.""" + """Check if an object is iterable.""" try: iter(obj) except Exception: @@ -86,8 +89,14 @@ class TestCalendar(ec.ExchangeCalendar): # Holidays. 
@property def regular_holidays(self): - return HolidayCalendar([Holiday(name=f"Holiday {i}", month=ts.month, day=ts.day) for i, ts in - enumerate(holidays)] if holidays else []) + return HolidayCalendar( + [ + Holiday(name=f"Holiday {i}", month=ts.month, day=ts.day) + for i, ts in enumerate(holidays) + ] + if holidays + else [] + ) @property def adhoc_holidays(self): @@ -95,25 +104,81 @@ def adhoc_holidays(self): @property def special_closes(self): - return list(map(lambda x: (x[0], HolidayCalendar( - [Holiday(name=f"Special Close {i}", month=ts.month, day=ts.day, observance=next_monday) for i, ts in - enumerate(x[1])])), special_closes)) if special_closes else [] + return ( + list( + map( + lambda x: ( + x[0], + HolidayCalendar( + [ + Holiday( + name=f"Special Close {i}", + month=ts.month, + day=ts.day, + observance=next_monday, + ) + for i, ts in enumerate(x[1]) + ] + ), + ), + special_closes, + ) + ) + if special_closes + else [] + ) @property def special_closes_adhoc(self): - return list(map(lambda x: (x[0], pd.DatetimeIndex(ensure_list(x[1]))), - adhoc_special_closes)) if adhoc_special_closes else [] + return ( + list( + map( + lambda x: (x[0], pd.DatetimeIndex(ensure_list(x[1]))), + adhoc_special_closes, + ) + ) + if adhoc_special_closes + else [] + ) @property def special_opens(self): - return list(map(lambda x: (x[0], HolidayCalendar( - [Holiday(name=f"Special Open {i}", month=ts.month, day=ts.day, observance=next_monday) for i, ts in - enumerate(x[1])])), special_opens)) if special_opens else [] + return ( + list( + map( + lambda x: ( + x[0], + HolidayCalendar( + [ + Holiday( + name=f"Special Open {i}", + month=ts.month, + day=ts.day, + observance=next_monday, + ) + for i, ts in enumerate(x[1]) + ] + ), + ), + special_opens, + ) + ) + if special_opens + else [] + ) @property def special_opens_adhoc(self): - return list(map(lambda x: (x[0], pd.DatetimeIndex(ensure_list(x[1]))), - adhoc_special_opens)) if adhoc_special_opens else [] + return ( + list( + map( + lambda x: (x[0], pd.DatetimeIndex(ensure_list(x[1]))), + adhoc_special_opens, + ) + ) + if adhoc_special_opens + else [] + ) # Weekmask. @property @@ -131,10 +196,11 @@ def weekmask(self): @pytest.mark.isolated def test_unmodified_calendars(): - """ Test that calendars are unmodified when the module is just imported, without calling apply_extensions() """ + """Test that calendars are unmodified when the module is just imported, without calling apply_extensions()""" import exchange_calendars_extensions.core as ecx import exchange_calendars as ec + c = ec.get_calendar("XETR") # Check if returned Calendar is of expected type. @@ -147,7 +213,7 @@ def test_unmodified_calendars(): @pytest.mark.isolated def test_apply_extensions(): - """ Test that calendars are modified when apply_extensions() is called """ + """Test that calendars are modified when apply_extensions() is called""" apply_extensions() import exchange_calendars as ec import exchange_calendars_extensions.core as ecx @@ -162,7 +228,7 @@ def test_apply_extensions(): @pytest.mark.isolated def test_extended_calendar_xetr(): - """ Test the additional properties of the extended XETR calendar. 
""" + """Test the additional properties of the extended XETR calendar.""" apply_extensions() import exchange_calendars as ec @@ -189,10 +255,14 @@ def test_extended_calendar_xetr(): assert isinstance(c.quarterly_expiries, ec.exchange_calendar.HolidayCalendar) assert hasattr(c, "last_trading_days_of_months") - assert isinstance(c.last_trading_days_of_months, ec.exchange_calendar.HolidayCalendar) + assert isinstance( + c.last_trading_days_of_months, ec.exchange_calendar.HolidayCalendar + ) assert hasattr(c, "last_regular_trading_days_of_months") - assert isinstance(c.last_regular_trading_days_of_months, ec.exchange_calendar.HolidayCalendar) + assert isinstance( + c.last_regular_trading_days_of_months, ec.exchange_calendar.HolidayCalendar + ) @pytest.mark.isolated @@ -209,10 +279,19 @@ def test_extended_calendar_test(): end = pd.Timestamp("2024-12-31") # Verify regular holidays for 2022, 2023, and 2024. - assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) # Verify adhoc holidays. assert c.adhoc_holidays == [pd.Timestamp("2023-02-01")] @@ -221,173 +300,274 @@ def test_extended_calendar_test(): assert len(c.special_closes) == 1 assert len(c.special_closes[0]) == 2 assert c.special_closes[0][0] == datetime.time(14, 0) - assert c.special_closes[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-03-01'): SPECIAL_CLOSE_0, - pd.Timestamp('2023-03-01'): SPECIAL_CLOSE_0, - pd.Timestamp('2024-03-01'): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) # Verify adhoc special closes. - assert c.special_closes_adhoc == [(datetime.time(14, 0), pd.DatetimeIndex([pd.Timestamp("2023-04-03")]))] + assert c.special_closes_adhoc == [ + (datetime.time(14, 0), pd.DatetimeIndex([pd.Timestamp("2023-04-03")])) + ] # Verify special opens for 2022, 2023, and 2024. assert len(c.special_opens) == 1 assert len(c.special_opens[0]) == 2 assert c.special_opens[0][0] == datetime.time(11, 0) - assert c.special_opens[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-05-02'): SPECIAL_OPEN_0, - pd.Timestamp('2023-05-01'): SPECIAL_OPEN_0, - pd.Timestamp('2024-05-01'): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) # Verify adhoc special opens. - assert c.special_opens_adhoc == [(datetime.time(11, 0), pd.DatetimeIndex([pd.Timestamp("2023-06-01")]))] + assert c.special_opens_adhoc == [ + (datetime.time(11, 0), pd.DatetimeIndex([pd.Timestamp("2023-06-01")])) + ] # Verify additional holiday calendars. 
- assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty - - assert c.special_closes_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-03-01'): SPECIAL_CLOSE_0, - pd.Timestamp('2023-03-01'): SPECIAL_CLOSE_0, - pd.Timestamp('2023-04-03'): AD_HOC_SPECIAL_CLOSE, - pd.Timestamp('2024-03-01'): SPECIAL_CLOSE_0})).empty - - assert c.special_opens_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-05-02'): SPECIAL_OPEN_0, - pd.Timestamp('2023-05-01'): SPECIAL_OPEN_0, - pd.Timestamp('2023-06-01'): AD_HOC_SPECIAL_OPEN, - pd.Timestamp('2024-05-01'): SPECIAL_OPEN_0})).empty - - assert c.weekend_days.holidays(start=pd.Timestamp('2023-01-01'), end=pd.Timestamp('2023-01-31'), - return_name=True).compare(pd.Series({ - pd.Timestamp('2023-01-01'): WEEKEND_DAY, - pd.Timestamp('2023-01-07'): WEEKEND_DAY, - pd.Timestamp('2023-01-08'): WEEKEND_DAY, - pd.Timestamp('2023-01-14'): WEEKEND_DAY, - pd.Timestamp('2023-01-15'): WEEKEND_DAY, - pd.Timestamp('2023-01-21'): WEEKEND_DAY, - pd.Timestamp('2023-01-22'): WEEKEND_DAY, - pd.Timestamp('2023-01-28'): WEEKEND_DAY, - pd.Timestamp('2023-01-29'): WEEKEND_DAY})).empty - - assert c.quarterly_expiries.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-03-18'): QUARTERLY_EXPIRY, - pd.Timestamp('2022-06-17'): QUARTERLY_EXPIRY, - pd.Timestamp('2022-09-16'): QUARTERLY_EXPIRY, - pd.Timestamp('2022-12-16'): QUARTERLY_EXPIRY, - pd.Timestamp('2023-03-17'): QUARTERLY_EXPIRY, - pd.Timestamp('2023-06-16'): QUARTERLY_EXPIRY, - pd.Timestamp('2023-09-15'): QUARTERLY_EXPIRY, - pd.Timestamp('2023-12-15'): QUARTERLY_EXPIRY, - pd.Timestamp('2024-03-15'): QUARTERLY_EXPIRY, - pd.Timestamp('2024-06-21'): QUARTERLY_EXPIRY, - pd.Timestamp('2024-09-20'): QUARTERLY_EXPIRY, - pd.Timestamp('2024-12-20'): QUARTERLY_EXPIRY})).empty - - assert c.monthly_expiries.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-01-21'): MONTHLY_EXPIRY, - pd.Timestamp('2022-02-18'): MONTHLY_EXPIRY, - pd.Timestamp('2022-04-15'): MONTHLY_EXPIRY, - pd.Timestamp('2022-05-20'): MONTHLY_EXPIRY, - pd.Timestamp('2022-07-15'): MONTHLY_EXPIRY, - pd.Timestamp('2022-08-19'): MONTHLY_EXPIRY, - pd.Timestamp('2022-10-21'): MONTHLY_EXPIRY, - pd.Timestamp('2022-11-18'): MONTHLY_EXPIRY, - pd.Timestamp('2023-01-20'): MONTHLY_EXPIRY, - pd.Timestamp('2023-02-17'): MONTHLY_EXPIRY, - pd.Timestamp('2023-04-21'): MONTHLY_EXPIRY, - pd.Timestamp('2023-05-19'): MONTHLY_EXPIRY, - pd.Timestamp('2023-07-21'): MONTHLY_EXPIRY, - pd.Timestamp('2023-08-18'): MONTHLY_EXPIRY, - pd.Timestamp('2023-10-20'): MONTHLY_EXPIRY, - pd.Timestamp('2023-11-17'): MONTHLY_EXPIRY, - pd.Timestamp('2024-01-19'): MONTHLY_EXPIRY, - pd.Timestamp('2024-02-16'): MONTHLY_EXPIRY, - pd.Timestamp('2024-04-19'): MONTHLY_EXPIRY, - pd.Timestamp('2024-05-17'): MONTHLY_EXPIRY, - pd.Timestamp('2024-07-19'): MONTHLY_EXPIRY, - pd.Timestamp('2024-08-16'): MONTHLY_EXPIRY, - pd.Timestamp('2024-10-18'): MONTHLY_EXPIRY, - pd.Timestamp('2024-11-15'): MONTHLY_EXPIRY})).empty - - assert c.last_trading_days_of_months.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-01-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-02-28'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-03-31'): 
LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-04-29'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-05-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-06-30'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-07-29'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-08-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-09-30'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-10-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-11-30'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-12-30'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-01-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-02-28'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-03-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-04-28'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-05-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-06-30'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-07-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-08-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-09-29'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-10-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-11-30'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-12-29'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-01-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-02-29'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-03-29'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-04-30'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-05-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-06-28'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-07-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-08-30'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-09-30'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-10-31'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-11-29'): LAST_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-12-31'): LAST_TRADING_DAY_OF_MONTH})).empty - - assert c.last_regular_trading_days_of_months.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-01-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-02-28'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-03-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-04-29'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-05-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-06-30'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-07-29'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-08-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-09-30'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-10-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-11-30'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2022-12-30'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-01-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-02-28'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-03-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-04-28'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-05-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-06-30'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-07-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-08-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-09-29'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-10-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2023-11-30'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - 
pd.Timestamp('2023-12-29'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-01-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-02-29'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-03-29'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-04-30'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-05-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-06-28'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-07-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-08-30'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-09-30'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-10-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-11-29'): LAST_REGULAR_TRADING_DAY_OF_MONTH, - pd.Timestamp('2024-12-31'): LAST_REGULAR_TRADING_DAY_OF_MONTH})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) + + assert ( + c.special_closes_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) + + assert ( + c.special_opens_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) + + assert ( + c.weekend_days.holidays( + start=pd.Timestamp("2023-01-01"), + end=pd.Timestamp("2023-01-31"), + return_name=True, + ) + .compare( + pd.Series( + { + pd.Timestamp("2023-01-01"): WEEKEND_DAY, + pd.Timestamp("2023-01-07"): WEEKEND_DAY, + pd.Timestamp("2023-01-08"): WEEKEND_DAY, + pd.Timestamp("2023-01-14"): WEEKEND_DAY, + pd.Timestamp("2023-01-15"): WEEKEND_DAY, + pd.Timestamp("2023-01-21"): WEEKEND_DAY, + pd.Timestamp("2023-01-22"): WEEKEND_DAY, + pd.Timestamp("2023-01-28"): WEEKEND_DAY, + pd.Timestamp("2023-01-29"): WEEKEND_DAY, + } + ) + ) + .empty + ) + + assert ( + c.quarterly_expiries.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-18"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-06-17"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-09-16"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-12-16"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-03-17"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-06-16"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-09-15"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-12-15"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-03-15"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-06-21"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-09-20"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-12-20"): QUARTERLY_EXPIRY, + } + ) + ) + .empty + ) + + assert ( + c.monthly_expiries.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-21"): MONTHLY_EXPIRY, + pd.Timestamp("2022-02-18"): MONTHLY_EXPIRY, + pd.Timestamp("2022-04-15"): MONTHLY_EXPIRY, + pd.Timestamp("2022-05-20"): MONTHLY_EXPIRY, + pd.Timestamp("2022-07-15"): MONTHLY_EXPIRY, + pd.Timestamp("2022-08-19"): MONTHLY_EXPIRY, + pd.Timestamp("2022-10-21"): MONTHLY_EXPIRY, + 
pd.Timestamp("2022-11-18"): MONTHLY_EXPIRY, + pd.Timestamp("2023-01-20"): MONTHLY_EXPIRY, + pd.Timestamp("2023-02-17"): MONTHLY_EXPIRY, + pd.Timestamp("2023-04-21"): MONTHLY_EXPIRY, + pd.Timestamp("2023-05-19"): MONTHLY_EXPIRY, + pd.Timestamp("2023-07-21"): MONTHLY_EXPIRY, + pd.Timestamp("2023-08-18"): MONTHLY_EXPIRY, + pd.Timestamp("2023-10-20"): MONTHLY_EXPIRY, + pd.Timestamp("2023-11-17"): MONTHLY_EXPIRY, + pd.Timestamp("2024-01-19"): MONTHLY_EXPIRY, + pd.Timestamp("2024-02-16"): MONTHLY_EXPIRY, + pd.Timestamp("2024-04-19"): MONTHLY_EXPIRY, + pd.Timestamp("2024-05-17"): MONTHLY_EXPIRY, + pd.Timestamp("2024-07-19"): MONTHLY_EXPIRY, + pd.Timestamp("2024-08-16"): MONTHLY_EXPIRY, + pd.Timestamp("2024-10-18"): MONTHLY_EXPIRY, + pd.Timestamp("2024-11-15"): MONTHLY_EXPIRY, + } + ) + ) + .empty + ) + + assert ( + c.last_trading_days_of_months.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-02-28"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-03-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-04-29"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-05-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-06-30"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-07-29"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-08-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-09-30"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-10-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-11-30"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-12-30"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-01-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-02-28"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-03-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-04-28"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-05-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-06-30"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-07-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-08-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-09-29"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-10-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-11-30"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-12-29"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-01-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-02-29"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-03-29"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-04-30"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-05-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-06-28"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-07-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-08-30"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-09-30"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-10-31"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-11-29"): LAST_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-12-31"): LAST_TRADING_DAY_OF_MONTH, + } + ) + ) + .empty + ) + + assert ( + c.last_regular_trading_days_of_months.holidays( + start=start, end=end, return_name=True + ) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-02-28"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-03-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-04-29"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-05-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-06-30"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + 
pd.Timestamp("2022-07-29"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-08-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-09-30"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-10-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-11-30"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2022-12-30"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-01-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-02-28"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-03-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-04-28"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-05-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-06-30"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-07-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-08-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-09-29"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-10-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-11-30"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2023-12-29"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-01-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-02-29"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-03-29"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-04-30"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-05-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-06-28"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-07-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-08-30"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-09-30"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-10-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-11-29"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + pd.Timestamp("2024-12-31"): LAST_REGULAR_TRADING_DAY_OF_MONTH, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -404,22 +584,40 @@ def test_add_new_holiday(): end = pd.Timestamp("2024-12-31") # Added holiday should show as regular holiday. - assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-07-03"): ADDED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-07-03"): ADDED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) # Added holiday should not be in ad-hoc holidays, i.e. this should be unmodified. assert c.adhoc_holidays == [pd.Timestamp("2023-02-01")] # Added holiday should be in holidays_all calendar. 
- assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, - pd.Timestamp("2023-07-03"): ADDED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, + pd.Timestamp("2023-07-03"): ADDED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -436,20 +634,38 @@ def test_overwrite_existing_regular_holiday(): end = pd.Timestamp("2024-12-31") # Added holiday should overwrite existing regular holiday. - assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): ADDED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): ADDED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) # Added holiday should not be in ad-hoc holidays, i.e. this should be unmodified. assert c.adhoc_holidays == [pd.Timestamp("2023-02-01")] # Added holiday should be in holidays_all calendar. - assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): ADDED_HOLIDAY, - pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): ADDED_HOLIDAY, + pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -466,21 +682,39 @@ def test_overwrite_existing_adhoc_holiday(): end = pd.Timestamp("2024-12-31") # Added holiday should be a regular holiday. - assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-02-01"): ADDED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-02-01"): ADDED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) # Overwritten ad-hoc holiday should be removed from list. assert c.adhoc_holidays == [] # Added holiday should be in holidays_all calendar. 
- assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-02-01"): ADDED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-02-01"): ADDED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -497,18 +731,36 @@ def test_remove_existing_regular_holiday(): end = pd.Timestamp("2024-12-31") # Removed day should no longer be in regular holidays. - assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) # Removed holiday should not affect ad-hoc holidays. assert c.adhoc_holidays == [pd.Timestamp("2023-02-01")] # Removed day should not be in holidays_all calendar. - assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -525,19 +777,37 @@ def test_remove_existing_adhoc_holiday(): end = pd.Timestamp("2024-12-31") # Regular holidays should be untouched. - assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) # Removed holiday should no longer be in ad-hoc holidays. assert c.adhoc_holidays == [] # Removed day should not be in holidays_all calendar. - assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -554,20 +824,38 @@ def test_remove_non_existent_holiday(): end = pd.Timestamp("2024-12-31") # Regular holidays should be untouched. 
- assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) # Ad-hoc holidays should be untouched. assert c.adhoc_holidays == [pd.Timestamp("2023-02-01")] # Calendar holidays_all should be untouched. - assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -586,22 +874,40 @@ def test_add_and_remove_new_holiday(): end = pd.Timestamp("2024-12-31") # Regular holidays should have new day. - assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-07-03"): ADDED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-07-03"): ADDED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) # Ad-hoc holidays should be unchanged. assert c.adhoc_holidays == [pd.Timestamp("2023-02-01")] # Calendar holidays_all should have new day. - assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, - pd.Timestamp("2023-07-03"): ADDED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, + pd.Timestamp("2023-07-03"): ADDED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -620,20 +926,38 @@ def test_add_and_remove_existing_holiday(): end = pd.Timestamp("2024-12-31") # Updated day should be in regular holidays. - assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp('2023-01-01'): ADDED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): ADDED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) # Ad-hoc holidays should be unchanged. assert c.adhoc_holidays == [pd.Timestamp("2023-02-01")] # Updated day should be in holidays_all. 
- assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp('2023-01-01'): ADDED_HOLIDAY, - pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): ADDED_HOLIDAY, + pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -653,22 +977,40 @@ def test_remove_and_add_new_holiday(): end = pd.Timestamp("2024-12-31") # Added holiday should show as regular holiday. - assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-07-03"): ADDED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-07-03"): ADDED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) # Added holiday should not be in ad-hoc holidays, i.e. this should be unmodified. assert c.adhoc_holidays == [pd.Timestamp("2023-02-01")] # Added holiday should be in holidays_all calendar. - assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, - pd.Timestamp("2023-07-03"): ADDED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, + pd.Timestamp("2023-07-03"): ADDED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -688,20 +1030,38 @@ def test_remove_and_add_existing_regular_holiday(): end = pd.Timestamp("2024-12-31") # Regular holiday should be overwritten. - assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): ADDED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): ADDED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) # Added holiday should not be in ad-hoc holidays, i.e. this should be unmodified. assert c.adhoc_holidays == [pd.Timestamp("2023-02-01")] # Added holiday should be in holidays_all calendar. 
- assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): ADDED_HOLIDAY, - pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): ADDED_HOLIDAY, + pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -721,21 +1081,39 @@ def test_remove_and_add_existing_adhoc_holiday(): end = pd.Timestamp("2024-12-31") # Regular holiday should contain the added holiday. - assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-02-01"): ADDED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-02-01"): ADDED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) # Ad-hoc holidays should be empty. assert c.adhoc_holidays == [] # Added holiday should be in holidays_all calendar. - assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-02-01"): ADDED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-02-01"): ADDED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -744,7 +1122,9 @@ def test_add_new_special_open_with_new_time(): import exchange_calendars as ec import exchange_calendars_extensions.core as ecx - ecx.add_special_open("TEST", pd.Timestamp("2023-07-03"), time(12, 0), ADDED_SPECIAL_OPEN) + ecx.add_special_open( + "TEST", pd.Timestamp("2023-07-03"), time(12, 0), ADDED_SPECIAL_OPEN + ) c = ec.get_calendar("TEST") @@ -756,26 +1136,49 @@ def test_add_new_special_open_with_new_time(): # Special opens for regular special open time should be unchanged. assert c.special_opens[0][0] == time(11, 0) - assert c.special_opens[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) # There should be a new calendar for the added special open time. 
assert c.special_opens[1][0] == time(12, 0) - assert c.special_opens[1][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2023-07-03"): ADDED_SPECIAL_OPEN})).empty + assert ( + c.special_opens[1][1] + .holidays(start=start, end=end, return_name=True) + .compare(pd.Series({pd.Timestamp("2023-07-03"): ADDED_SPECIAL_OPEN})) + .empty + ) # Added special open should not be in ad-hoc special opens, i.e. this should be unmodified. assert c.special_opens_adhoc == [(time(11, 00), pd.Timestamp("2023-06-01"))] # Added special open should be in consolidated calendar. - assert c.special_opens_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, - pd.Timestamp("2023-07-03"): ADDED_SPECIAL_OPEN, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, + pd.Timestamp("2023-07-03"): ADDED_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -784,7 +1187,9 @@ def test_add_new_special_open_with_existing_time(): import exchange_calendars as ec import exchange_calendars_extensions.core as ecx - ecx.add_special_open("TEST", pd.Timestamp("2023-07-03"), time(11, 0), ADDED_SPECIAL_OPEN) + ecx.add_special_open( + "TEST", pd.Timestamp("2023-07-03"), time(11, 0), ADDED_SPECIAL_OPEN + ) c = ec.get_calendar("TEST") @@ -796,22 +1201,41 @@ def test_add_new_special_open_with_existing_time(): # Special opens for regular special open time should be unchanged. assert c.special_opens[0][0] == time(11, 0) - assert c.special_opens[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2023-07-03"): ADDED_SPECIAL_OPEN, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2023-07-03"): ADDED_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) # Added special open should not be in ad-hoc special opens, i.e. this should be unmodified. assert c.special_opens_adhoc == [(time(11, 00), pd.Timestamp("2023-06-01"))] # Added special open should be in consolidated calendar. 
- assert c.special_opens_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, - pd.Timestamp("2023-07-03"): ADDED_SPECIAL_OPEN, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, + pd.Timestamp("2023-07-03"): ADDED_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -820,7 +1244,9 @@ def test_overwrite_existing_regular_special_open_with_new_time(): import exchange_calendars as ec import exchange_calendars_extensions.core as ecx - ecx.add_special_open("TEST", pd.Timestamp("2023-05-01"), time(12, 0), ADDED_SPECIAL_OPEN) + ecx.add_special_open( + "TEST", pd.Timestamp("2023-05-01"), time(12, 0), ADDED_SPECIAL_OPEN + ) c = ec.get_calendar("TEST") @@ -832,30 +1258,57 @@ def test_overwrite_existing_regular_special_open_with_new_time(): # Special opens for regular special open time should exclude the overwritten day. assert c.special_opens[0][0] == time(11, 0) - assert c.special_opens[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) # There should be a new calendar for the added special open time. assert c.special_opens[1][0] == time(12, 0) - assert c.special_opens[1][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2023-05-01"): ADDED_SPECIAL_OPEN})).empty + assert ( + c.special_opens[1][1] + .holidays(start=start, end=end, return_name=True) + .compare(pd.Series({pd.Timestamp("2023-05-01"): ADDED_SPECIAL_OPEN})) + .empty + ) # Added special open should not be in ad-hoc special opens, i.e. this should be unmodified. assert c.special_opens_adhoc == [(time(11, 00), pd.Timestamp("2023-06-01"))] # Added special open should be in consolidated calendar. 
- assert c.special_opens_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): ADDED_SPECIAL_OPEN, - pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): ADDED_SPECIAL_OPEN, + pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) @pytest.mark.isolated def test_overwrite_existing_regular_special_open_with_existing_time(): add_test_calendar_and_apply_extensions( - special_opens=[(time(11, 00), [pd.Timestamp("2023-05-01")]), (time(12, 00), [pd.Timestamp("2023-05-04")])]) + special_opens=[ + (time(11, 00), [pd.Timestamp("2023-05-01")]), + (time(12, 00), [pd.Timestamp("2023-05-04")]), + ] + ) import exchange_calendars as ec import exchange_calendars_extensions.core as ecx @@ -869,31 +1322,62 @@ def test_overwrite_existing_regular_special_open_with_existing_time(): # Special opens for regular special open time should exclude the overwritten day. assert c.special_opens[0][0] == time(11, 0) - assert c.special_opens[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) assert c.special_opens[1][0] == time(12, 0) - assert c.special_opens[1][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-04"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-04"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-06"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[1][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-04"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-04"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-06"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) # Added special open should not be in ad-hoc special opens, i.e. this should be unmodified. assert c.special_opens_adhoc == [(time(11, 00), pd.Timestamp("2023-06-01"))] # Added special open should be in consolidated calendar. 
- assert c.special_opens_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2022-05-04"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-04"): SPECIAL_OPEN_0, - pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-06"): SPECIAL_OPEN_0})).empty - - ecx.add_special_open("TEST", pd.Timestamp("2023-05-01"), time(12, 0), ADDED_SPECIAL_OPEN) + assert ( + c.special_opens_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2022-05-04"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-04"): SPECIAL_OPEN_0, + pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-06"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) + + ecx.add_special_open( + "TEST", pd.Timestamp("2023-05-01"), time(12, 0), ADDED_SPECIAL_OPEN + ) c = ec.get_calendar("TEST") @@ -902,29 +1386,58 @@ def test_overwrite_existing_regular_special_open_with_existing_time(): # Special opens for regular special open time should exclude the overwritten day. assert c.special_opens[0][0] == time(11, 0) - assert c.special_opens[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) assert c.special_opens[1][0] == time(12, 0) - assert c.special_opens[1][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-04"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): ADDED_SPECIAL_OPEN, - pd.Timestamp("2023-05-04"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-06"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[1][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-04"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): ADDED_SPECIAL_OPEN, + pd.Timestamp("2023-05-04"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-06"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) # Added special open should not be in ad-hoc special opens, i.e. this should be unmodified. assert c.special_opens_adhoc == [(time(11, 00), pd.Timestamp("2023-06-01"))] # Added special open should be in consolidated calendar. 
- assert c.special_opens_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2022-05-04"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): ADDED_SPECIAL_OPEN, - pd.Timestamp("2023-05-04"): SPECIAL_OPEN_0, - pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-06"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2022-05-04"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): ADDED_SPECIAL_OPEN, + pd.Timestamp("2023-05-04"): SPECIAL_OPEN_0, + pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-06"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -933,7 +1446,9 @@ def test_overwrite_existing_ad_hoc_special_open_with_new_time(): import exchange_calendars as ec import exchange_calendars_extensions.core as ecx - ecx.add_special_open("TEST", pd.Timestamp("2023-06-01"), time(12, 0), ADDED_SPECIAL_OPEN) + ecx.add_special_open( + "TEST", pd.Timestamp("2023-06-01"), time(12, 0), ADDED_SPECIAL_OPEN + ) c = ec.get_calendar("TEST") @@ -945,24 +1460,47 @@ def test_overwrite_existing_ad_hoc_special_open_with_new_time(): # Special opens for regular special open time should exclude the overwritten day. assert c.special_opens[0][0] == time(11, 0) - assert c.special_opens[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) assert c.special_opens[1][0] == time(12, 0) - assert c.special_opens[1][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2023-06-01"): ADDED_SPECIAL_OPEN})).empty + assert ( + c.special_opens[1][1] + .holidays(start=start, end=end, return_name=True) + .compare(pd.Series({pd.Timestamp("2023-06-01"): ADDED_SPECIAL_OPEN})) + .empty + ) # Ad-hoc special opens should now be empty. assert c.special_opens_adhoc == [] # Added special open should be in consolidated calendar. 
- assert c.special_opens_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2023-06-01"): ADDED_SPECIAL_OPEN, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2023-06-01"): ADDED_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -971,7 +1509,9 @@ def test_overwrite_existing_ad_hoc_special_open_with_existing_time(): import exchange_calendars as ec import exchange_calendars_extensions.core as ecx - ecx.add_special_open("TEST", pd.Timestamp("2023-06-01"), time(11, 0), ADDED_SPECIAL_OPEN) + ecx.add_special_open( + "TEST", pd.Timestamp("2023-06-01"), time(11, 0), ADDED_SPECIAL_OPEN + ) c = ec.get_calendar("TEST") @@ -983,27 +1523,50 @@ def test_overwrite_existing_ad_hoc_special_open_with_existing_time(): # Special opens for regular special open time should exclude the overwritten day. assert c.special_opens[0][0] == time(11, 0) - assert c.special_opens[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2023-06-01"): ADDED_SPECIAL_OPEN, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2023-06-01"): ADDED_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) # Ad-hoc special opens should now be empty. assert c.special_opens_adhoc == [] # Added special open should be in consolidated calendar. - assert c.special_opens_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2023-06-01"): ADDED_SPECIAL_OPEN, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2023-06-01"): ADDED_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) @pytest.mark.isolated def test_remove_existing_regular_special_open(): add_test_calendar_and_apply_extensions( - special_opens=[(time(11, 00), [pd.Timestamp("2023-05-01")]), (time(12, 00), [pd.Timestamp("2023-05-04")])]) + special_opens=[ + (time(11, 00), [pd.Timestamp("2023-05-01")]), + (time(12, 00), [pd.Timestamp("2023-05-04")]), + ] + ) import exchange_calendars as ec import exchange_calendars_extensions.core as ecx @@ -1019,27 +1582,56 @@ def test_remove_existing_regular_special_open(): # Special opens for regular special open time should exclude the overwritten day. 
assert c.special_opens[0][0] == time(11, 0) - assert c.special_opens[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) assert c.special_opens[1][0] == time(12, 0) - assert c.special_opens[1][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-04"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-04"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-06"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[1][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-04"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-04"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-06"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) # Ad-hoc special opens should now be empty. assert c.special_opens_adhoc == [(time(11, 00), pd.Timestamp("2023-06-01"))] # Added special open should be in consolidated calendar. - assert c.special_opens_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2022-05-04"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-04"): SPECIAL_OPEN_0, - pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-06"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2022-05-04"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-04"): SPECIAL_OPEN_0, + pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-06"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1060,19 +1652,38 @@ def test_remove_existing_ad_hoc_special_open(): # Special opens for regular special open time should exclude the overwritten day. assert c.special_opens[0][0] == time(11, 0) - assert c.special_opens[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) # Ad-hoc special opens should now be empty. assert c.special_opens_adhoc == [] # Added special open should be in consolidated calendar. 
- assert c.special_opens_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1093,20 +1704,39 @@ def test_remove_non_existent_special_open(): # Special opens for regular special open time should exclude the overwritten day. assert c.special_opens[0][0] == time(11, 0) - assert c.special_opens[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) # Ad-hoc special opens should now be empty. assert c.special_opens_adhoc == [(time(11, 00), pd.Timestamp("2023-06-01"))] # Added special open should be in consolidated calendar. - assert c.special_opens_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1115,7 +1745,9 @@ def test_add_new_special_close_with_new_time(): import exchange_calendars as ec import exchange_calendars_extensions.core as ecx - ecx.add_special_close("TEST", pd.Timestamp("2023-07-03"), time(15, 0), ADDED_SPECIAL_CLOSE) + ecx.add_special_close( + "TEST", pd.Timestamp("2023-07-03"), time(15, 0), ADDED_SPECIAL_CLOSE + ) c = ec.get_calendar("TEST") @@ -1127,26 +1759,49 @@ def test_add_new_special_close_with_new_time(): # Special closes for regular special close time should be unchanged. assert c.special_closes[0][0] == time(14, 0) - assert c.special_closes[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) # There should be a new calendar for the added special close time. 
assert c.special_closes[1][0] == time(15, 0) - assert c.special_closes[1][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2023-07-03"): ADDED_SPECIAL_CLOSE})).empty + assert ( + c.special_closes[1][1] + .holidays(start=start, end=end, return_name=True) + .compare(pd.Series({pd.Timestamp("2023-07-03"): ADDED_SPECIAL_CLOSE})) + .empty + ) # Added special close should not be in ad-hoc special closes, i.e. this should be unmodified. assert c.special_closes_adhoc == [(time(14, 00), pd.Timestamp("2023-04-03"))] # Added special close should be in consolidated calendar. - assert c.special_closes_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, - pd.Timestamp("2023-07-03"): ADDED_SPECIAL_CLOSE, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, + pd.Timestamp("2023-07-03"): ADDED_SPECIAL_CLOSE, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1155,7 +1810,9 @@ def test_add_new_special_close_with_existing_time(): import exchange_calendars as ec import exchange_calendars_extensions.core as ecx - ecx.add_special_close("TEST", pd.Timestamp("2023-07-03"), time(14, 0), ADDED_SPECIAL_CLOSE) + ecx.add_special_close( + "TEST", pd.Timestamp("2023-07-03"), time(14, 0), ADDED_SPECIAL_CLOSE + ) c = ec.get_calendar("TEST") @@ -1167,22 +1824,41 @@ def test_add_new_special_close_with_existing_time(): # Special Closes for regular special close time should be unchanged. assert c.special_closes[0][0] == time(14, 0) - assert c.special_closes[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-07-03"): ADDED_SPECIAL_CLOSE, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-07-03"): ADDED_SPECIAL_CLOSE, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) # Added special close should not be in ad-hoc special closes, i.e. this should be unmodified. assert c.special_closes_adhoc == [(time(14, 0), pd.Timestamp("2023-04-03"))] # Added special close should be in consolidated calendar. 
- assert c.special_closes_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, - pd.Timestamp("2023-07-03"): ADDED_SPECIAL_CLOSE, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, + pd.Timestamp("2023-07-03"): ADDED_SPECIAL_CLOSE, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1191,7 +1867,9 @@ def test_overwrite_existing_regular_special_close_with_new_time(): import exchange_calendars as ec import exchange_calendars_extensions.core as ecx - ecx.add_special_close("TEST", pd.Timestamp("2023-03-01"), time(15, 0), ADDED_SPECIAL_CLOSE) + ecx.add_special_close( + "TEST", pd.Timestamp("2023-03-01"), time(15, 0), ADDED_SPECIAL_CLOSE + ) c = ec.get_calendar("TEST") @@ -1203,30 +1881,57 @@ def test_overwrite_existing_regular_special_close_with_new_time(): # Special Closes for regular special close time should exclude the overwritten day. assert c.special_closes[0][0] == time(14, 0) - assert c.special_closes[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) # There should be a new calendar for the added special close time. assert c.special_closes[1][0] == time(15, 0) - assert c.special_closes[1][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2023-03-01"): ADDED_SPECIAL_CLOSE})).empty + assert ( + c.special_closes[1][1] + .holidays(start=start, end=end, return_name=True) + .compare(pd.Series({pd.Timestamp("2023-03-01"): ADDED_SPECIAL_CLOSE})) + .empty + ) # Added special close should not be in ad-hoc special closes, i.e. this should be unmodified. assert c.special_closes_adhoc == [(time(14, 00), pd.Timestamp("2023-04-03"))] # Added special close should be in consolidated calendar. 
- assert c.special_closes_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): ADDED_SPECIAL_CLOSE, - pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): ADDED_SPECIAL_CLOSE, + pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) @pytest.mark.isolated def test_overwrite_existing_regular_special_close_with_existing_time(): add_test_calendar_and_apply_extensions( - special_closes=[(time(14, 00), [pd.Timestamp("2023-03-01")]), (time(15, 00), [pd.Timestamp("2023-03-04")])]) + special_closes=[ + (time(14, 00), [pd.Timestamp("2023-03-01")]), + (time(15, 00), [pd.Timestamp("2023-03-04")]), + ] + ) import exchange_calendars as ec import exchange_calendars_extensions.core as ecx @@ -1240,31 +1945,62 @@ def test_overwrite_existing_regular_special_close_with_existing_time(): # Special Closes for regular special close time should exclude the overwritten day. assert c.special_closes[0][0] == time(14, 0) - assert c.special_closes[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) assert c.special_closes[1][0] == time(15, 0) - assert c.special_closes[1][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-04"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-06"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-04"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[1][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-04"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-06"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-04"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) # Added special close should not be in ad-hoc special closes, i.e. this should be unmodified. assert c.special_closes_adhoc == [(time(14, 00), pd.Timestamp("2023-04-03"))] # Added special close should be in consolidated calendar. 
- assert c.special_closes_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2022-03-04"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-06"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-04"): SPECIAL_CLOSE_0})).empty - - ecx.add_special_close("TEST", pd.Timestamp("2023-03-01"), time(15, 0), ADDED_SPECIAL_CLOSE) + assert ( + c.special_closes_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2022-03-04"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-06"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-04"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) + + ecx.add_special_close( + "TEST", pd.Timestamp("2023-03-01"), time(15, 0), ADDED_SPECIAL_CLOSE + ) c = ec.get_calendar("TEST") @@ -1273,29 +2009,58 @@ def test_overwrite_existing_regular_special_close_with_existing_time(): # Special Closes for regular special close time should exclude the overwritten day. assert c.special_closes[0][0] == time(14, 0) - assert c.special_closes[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) assert c.special_closes[1][0] == time(15, 0) - assert c.special_closes[1][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-04"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): ADDED_SPECIAL_CLOSE, - pd.Timestamp("2023-03-06"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-04"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[1][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-04"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): ADDED_SPECIAL_CLOSE, + pd.Timestamp("2023-03-06"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-04"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) # Added special close should not be in ad-hoc special closes, i.e. this should be unmodified. assert c.special_closes_adhoc == [(time(14, 00), pd.Timestamp("2023-04-03"))] # Added special close should be in consolidated calendar. 
- assert c.special_closes_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2022-03-04"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): ADDED_SPECIAL_CLOSE, - pd.Timestamp("2023-03-06"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-04"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2022-03-04"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): ADDED_SPECIAL_CLOSE, + pd.Timestamp("2023-03-06"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-04"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1304,7 +2069,9 @@ def test_overwrite_existing_ad_hoc_special_close_with_new_time(): import exchange_calendars as ec import exchange_calendars_extensions.core as ecx - ecx.add_special_close("TEST", pd.Timestamp("2023-04-03"), time(15, 0), ADDED_SPECIAL_CLOSE) + ecx.add_special_close( + "TEST", pd.Timestamp("2023-04-03"), time(15, 0), ADDED_SPECIAL_CLOSE + ) c = ec.get_calendar("TEST") @@ -1316,24 +2083,47 @@ def test_overwrite_existing_ad_hoc_special_close_with_new_time(): # Special Closes for regular special close time should exclude the overwritten day. assert c.special_closes[0][0] == time(14, 0) - assert c.special_closes[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) assert c.special_closes[1][0] == time(15, 0) - assert c.special_closes[1][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2023-04-03"): ADDED_SPECIAL_CLOSE})).empty + assert ( + c.special_closes[1][1] + .holidays(start=start, end=end, return_name=True) + .compare(pd.Series({pd.Timestamp("2023-04-03"): ADDED_SPECIAL_CLOSE})) + .empty + ) # Ad-hoc special closes should now be empty. assert c.special_closes_adhoc == [] # Added special close should be in consolidated calendar. 
- assert c.special_closes_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-04-03"): ADDED_SPECIAL_CLOSE, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-04-03"): ADDED_SPECIAL_CLOSE, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1342,7 +2132,9 @@ def test_overwrite_existing_ad_hoc_special_close_with_existing_time(): import exchange_calendars as ec import exchange_calendars_extensions.core as ecx - ecx.add_special_close("TEST", pd.Timestamp("2023-04-03"), time(14, 0), ADDED_SPECIAL_CLOSE) + ecx.add_special_close( + "TEST", pd.Timestamp("2023-04-03"), time(14, 0), ADDED_SPECIAL_CLOSE + ) c = ec.get_calendar("TEST") @@ -1354,27 +2146,50 @@ def test_overwrite_existing_ad_hoc_special_close_with_existing_time(): # Special Closes for regular special close time should exclude the overwritten day. assert c.special_closes[0][0] == time(14, 0) - assert c.special_closes[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-04-03"): ADDED_SPECIAL_CLOSE, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-04-03"): ADDED_SPECIAL_CLOSE, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) # Ad-hoc special closes should now be empty. assert c.special_closes_adhoc == [] # Added special close should be in consolidated calendar. - assert c.special_closes_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-04-03"): ADDED_SPECIAL_CLOSE, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-04-03"): ADDED_SPECIAL_CLOSE, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) @pytest.mark.isolated def test_remove_existing_regular_special_close(): add_test_calendar_and_apply_extensions( - special_closes=[(time(14, 00), [pd.Timestamp("2023-03-01")]), (time(15, 00), [pd.Timestamp("2023-03-04")])]) + special_closes=[ + (time(14, 00), [pd.Timestamp("2023-03-01")]), + (time(15, 00), [pd.Timestamp("2023-03-04")]), + ] + ) import exchange_calendars as ec import exchange_calendars_extensions.core as ecx @@ -1390,27 +2205,56 @@ def test_remove_existing_regular_special_close(): # Special Closes for regular special close time should exclude the overwritten day. 
assert c.special_closes[0][0] == time(14, 0) - assert c.special_closes[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) assert c.special_closes[1][0] == time(15, 0) - assert c.special_closes[1][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-04"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-06"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-04"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[1][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-04"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-06"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-04"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) # Ad-hoc special closes should now be empty. assert c.special_closes_adhoc == [(time(14, 00), pd.Timestamp("2023-04-03"))] # Added special close should be in consolidated calendar. - assert c.special_closes_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2022-03-04"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-06"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-04"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2022-03-04"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-06"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-04"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1431,19 +2275,38 @@ def test_remove_existing_ad_hoc_special_close(): # Special Closes for regular special close time should exclude the overwritten day. assert c.special_closes[0][0] == time(14, 0) - assert c.special_closes[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) # Ad-hoc special closes should now be empty. assert c.special_closes_adhoc == [] # Added special close should be in consolidated calendar. 
- assert c.special_closes_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1464,20 +2327,39 @@ def test_remove_non_existent_special_close(): # Special Closes for regular special close time should exclude the overwritten day. assert c.special_closes[0][0] == time(14, 0) - assert c.special_closes[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) # Ad-hoc special closes should now be empty. assert c.special_closes_adhoc == [(time(14, 00), pd.Timestamp("2023-04-03"))] # Added special close should be in consolidated calendar. - assert c.special_closes_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, - pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, - pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-01"): SPECIAL_CLOSE_0, + pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1487,7 +2369,9 @@ def test_add_quarterly_expiry(): import exchange_calendars_extensions.core as ecx # Add quarterly expiry. - ecx.add_quarterly_expiry("TEST", pd.Timestamp("2023-06-15"), "Added Quarterly Expiry") + ecx.add_quarterly_expiry( + "TEST", pd.Timestamp("2023-06-15"), "Added Quarterly Expiry" + ) c = ec.get_calendar("TEST") @@ -1495,20 +2379,29 @@ def test_add_quarterly_expiry(): end = pd.Timestamp("2024-12-31") # Quarterly expiry dates should be empty. 
- assert c.quarterly_expiries.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-18"): QUARTERLY_EXPIRY, - pd.Timestamp("2022-06-17"): QUARTERLY_EXPIRY, - pd.Timestamp("2022-09-16"): QUARTERLY_EXPIRY, - pd.Timestamp("2022-12-16"): QUARTERLY_EXPIRY, - pd.Timestamp("2023-03-17"): QUARTERLY_EXPIRY, - pd.Timestamp("2023-06-15"): "Added Quarterly Expiry", - pd.Timestamp("2023-06-16"): QUARTERLY_EXPIRY, - pd.Timestamp("2023-09-15"): QUARTERLY_EXPIRY, - pd.Timestamp("2023-12-15"): QUARTERLY_EXPIRY, - pd.Timestamp("2024-03-15"): QUARTERLY_EXPIRY, - pd.Timestamp("2024-06-21"): QUARTERLY_EXPIRY, - pd.Timestamp("2024-09-20"): QUARTERLY_EXPIRY, - pd.Timestamp("2024-12-20"): QUARTERLY_EXPIRY})).empty + assert ( + c.quarterly_expiries.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-18"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-06-17"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-09-16"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-12-16"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-03-17"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-06-15"): "Added Quarterly Expiry", + pd.Timestamp("2023-06-16"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-09-15"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-12-15"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-03-15"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-06-21"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-09-20"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-12-20"): QUARTERLY_EXPIRY, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1526,18 +2419,27 @@ def test_remove_quarterly_expiry(): end = pd.Timestamp("2024-12-31") # Quarterly expiry dates should be empty. - assert c.quarterly_expiries.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-03-18"): QUARTERLY_EXPIRY, - pd.Timestamp("2022-06-17"): QUARTERLY_EXPIRY, - pd.Timestamp("2022-09-16"): QUARTERLY_EXPIRY, - pd.Timestamp("2022-12-16"): QUARTERLY_EXPIRY, - pd.Timestamp("2023-03-17"): QUARTERLY_EXPIRY, - pd.Timestamp("2023-09-15"): QUARTERLY_EXPIRY, - pd.Timestamp("2023-12-15"): QUARTERLY_EXPIRY, - pd.Timestamp("2024-03-15"): QUARTERLY_EXPIRY, - pd.Timestamp("2024-06-21"): QUARTERLY_EXPIRY, - pd.Timestamp("2024-09-20"): QUARTERLY_EXPIRY, - pd.Timestamp("2024-12-20"): QUARTERLY_EXPIRY})).empty + assert ( + c.quarterly_expiries.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-18"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-06-17"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-09-16"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-12-16"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-03-17"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-09-15"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-12-15"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-03-15"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-06-21"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-09-20"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-12-20"): QUARTERLY_EXPIRY, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1555,32 +2457,41 @@ def test_add_monthly_expiry(): end = pd.Timestamp("2024-12-31") # Quarterly expiry dates should be empty. 
- assert c.monthly_expiries.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-21"): MONTHLY_EXPIRY, - pd.Timestamp("2022-02-18"): MONTHLY_EXPIRY, - pd.Timestamp("2022-04-15"): MONTHLY_EXPIRY, - pd.Timestamp("2022-05-20"): MONTHLY_EXPIRY, - pd.Timestamp("2022-07-15"): MONTHLY_EXPIRY, - pd.Timestamp("2022-08-19"): MONTHLY_EXPIRY, - pd.Timestamp("2022-10-21"): MONTHLY_EXPIRY, - pd.Timestamp("2022-11-18"): MONTHLY_EXPIRY, - pd.Timestamp("2023-01-19"): "Added Monthly Expiry", - pd.Timestamp("2023-01-20"): MONTHLY_EXPIRY, - pd.Timestamp("2023-02-17"): MONTHLY_EXPIRY, - pd.Timestamp("2023-04-21"): MONTHLY_EXPIRY, - pd.Timestamp("2023-05-19"): MONTHLY_EXPIRY, - pd.Timestamp("2023-07-21"): MONTHLY_EXPIRY, - pd.Timestamp("2023-08-18"): MONTHLY_EXPIRY, - pd.Timestamp("2023-10-20"): MONTHLY_EXPIRY, - pd.Timestamp("2023-11-17"): MONTHLY_EXPIRY, - pd.Timestamp("2024-01-19"): MONTHLY_EXPIRY, - pd.Timestamp("2024-02-16"): MONTHLY_EXPIRY, - pd.Timestamp("2024-04-19"): MONTHLY_EXPIRY, - pd.Timestamp("2024-05-17"): MONTHLY_EXPIRY, - pd.Timestamp("2024-07-19"): MONTHLY_EXPIRY, - pd.Timestamp("2024-08-16"): MONTHLY_EXPIRY, - pd.Timestamp("2024-10-18"): MONTHLY_EXPIRY, - pd.Timestamp("2024-11-15"): MONTHLY_EXPIRY})).empty + assert ( + c.monthly_expiries.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-21"): MONTHLY_EXPIRY, + pd.Timestamp("2022-02-18"): MONTHLY_EXPIRY, + pd.Timestamp("2022-04-15"): MONTHLY_EXPIRY, + pd.Timestamp("2022-05-20"): MONTHLY_EXPIRY, + pd.Timestamp("2022-07-15"): MONTHLY_EXPIRY, + pd.Timestamp("2022-08-19"): MONTHLY_EXPIRY, + pd.Timestamp("2022-10-21"): MONTHLY_EXPIRY, + pd.Timestamp("2022-11-18"): MONTHLY_EXPIRY, + pd.Timestamp("2023-01-19"): "Added Monthly Expiry", + pd.Timestamp("2023-01-20"): MONTHLY_EXPIRY, + pd.Timestamp("2023-02-17"): MONTHLY_EXPIRY, + pd.Timestamp("2023-04-21"): MONTHLY_EXPIRY, + pd.Timestamp("2023-05-19"): MONTHLY_EXPIRY, + pd.Timestamp("2023-07-21"): MONTHLY_EXPIRY, + pd.Timestamp("2023-08-18"): MONTHLY_EXPIRY, + pd.Timestamp("2023-10-20"): MONTHLY_EXPIRY, + pd.Timestamp("2023-11-17"): MONTHLY_EXPIRY, + pd.Timestamp("2024-01-19"): MONTHLY_EXPIRY, + pd.Timestamp("2024-02-16"): MONTHLY_EXPIRY, + pd.Timestamp("2024-04-19"): MONTHLY_EXPIRY, + pd.Timestamp("2024-05-17"): MONTHLY_EXPIRY, + pd.Timestamp("2024-07-19"): MONTHLY_EXPIRY, + pd.Timestamp("2024-08-16"): MONTHLY_EXPIRY, + pd.Timestamp("2024-10-18"): MONTHLY_EXPIRY, + pd.Timestamp("2024-11-15"): MONTHLY_EXPIRY, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1589,7 +2500,9 @@ def test_overwrite_regular_holiday_with_special_open(): import exchange_calendars as ec import exchange_calendars_extensions.core as ecx - ecx.add_special_open("TEST", pd.Timestamp("2023-01-02"), time(11, 0), ADDED_SPECIAL_OPEN) + ecx.add_special_open( + "TEST", pd.Timestamp("2023-01-02"), time(11, 0), ADDED_SPECIAL_OPEN + ) c = ec.get_calendar("TEST") @@ -1597,40 +2510,77 @@ def test_overwrite_regular_holiday_with_special_open(): end = pd.Timestamp("2024-12-31") # Overwritten holiday should no longer be in regular holidays. 
- assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-02"): HOLIDAY_0, - pd.Timestamp("2024-01-02"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-02"): HOLIDAY_0, + pd.Timestamp("2024-01-02"): HOLIDAY_0, + } + ) + ) + .empty + ) # Ad-hoc holidays should be unmodified. assert c.adhoc_holidays == [pd.Timestamp("2023-02-01")] # Overwritten holiday should no longer be in holidays_all calendar. - assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-02"): HOLIDAY_0, - pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, - pd.Timestamp("2024-01-02"): HOLIDAY_0})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-02"): HOLIDAY_0, + pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, + pd.Timestamp("2024-01-02"): HOLIDAY_0, + } + ) + ) + .empty + ) # Check number of distinct special open times. assert len(c.special_opens) == 1 # Added special open should be in special opens for regular time. assert c.special_opens[0][0] == time(11, 0) - assert c.special_opens[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-01-02"): ADDED_SPECIAL_OPEN, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-01-02"): ADDED_SPECIAL_OPEN, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) # Ad-hoc special opens should be unmodified. assert c.special_opens_adhoc == [(time(11, 00), pd.Timestamp("2023-06-01"))] # Added special open should be in consolidated calendar. 
- assert c.special_opens_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, - pd.Timestamp("2023-01-02"): ADDED_SPECIAL_OPEN, - pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, - pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, - pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + pd.Timestamp("2023-01-02"): ADDED_SPECIAL_OPEN, + pd.Timestamp("2023-05-01"): SPECIAL_OPEN_0, + pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1640,19 +2590,36 @@ def test_apply_changeset(): import exchange_calendars_extensions.core as ecx changes = { - 'add': { - '2023-01-02': {'type': 'holiday', 'name': INSERTED_HOLIDAY}, - '2023-05-02': {'type': 'special_open', 'name': "Inserted Special Open", 'time': '11:00'}, - '2023-03-02': {'type': 'special_close', 'name': "Inserted Special Close", 'time': '14:00'}, - '2023-08-17': {'type': 'monthly_expiry', 'name': "Inserted Monthly Expiry"}, - '2023-09-14': {'type': 'quarterly_expiry', 'name': "Inserted Quarterly Expiry"}, + "add": { + "2023-01-02": {"type": "holiday", "name": INSERTED_HOLIDAY}, + "2023-05-02": { + "type": "special_open", + "name": "Inserted Special Open", + "time": "11:00", + }, + "2023-03-02": { + "type": "special_close", + "name": "Inserted Special Close", + "time": "14:00", + }, + "2023-08-17": {"type": "monthly_expiry", "name": "Inserted Monthly Expiry"}, + "2023-09-14": { + "type": "quarterly_expiry", + "name": "Inserted Quarterly Expiry", + }, + }, + "remove": [ + "2023-01-01", + "2023-05-01", + "2023-03-01", + "2023-08-18", + "2023-09-15", + ], + "meta": { + "2023-01-03": {"tags": ["tag1", "tag2"]}, + "2023-05-03": {"comment": "This is a comment"}, + "2023-03-03": {"tags": ["tag3", "tag´4"], "comment": "This is a comment"}, }, - 'remove': ['2023-01-01', '2023-05-01', '2023-03-01', '2023-08-18', '2023-09-15'], - 'meta': { - '2023-01-03': {'tags': ['tag1', 'tag2']}, - '2023-05-03': {'comment': 'This is a comment'}, - '2023-03-03': {'tags': ['tag3', 'tag´4'], 'comment': 'This is a comment'}, - } } ecx.update_calendar("TEST", changes) c = ec.get_calendar("TEST") @@ -1663,11 +2630,20 @@ def test_apply_changeset(): end = pd.Timestamp("2024-12-31") # Verify regular holidays for 2022, 2023, and 2024. - assert c.regular_holidays.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - # removed: pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-02"): INSERTED_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty + assert ( + c.regular_holidays.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + # removed: pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-02"): INSERTED_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) # Verify adhoc holidays. 
assert c.adhoc_holidays == [pd.Timestamp("2023-02-01")] @@ -1676,148 +2652,232 @@ def test_apply_changeset(): assert len(c.special_closes) == 1 assert len(c.special_closes[0]) == 2 assert c.special_closes[0][0] == datetime.time(14, 0) - assert c.special_closes[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-03-01'): SPECIAL_CLOSE_0, - # removed: pd.Timestamp('2023-03-01'): SPECIAL_CLOSE_0, - pd.Timestamp('2023-03-02'): 'Inserted Special Close', - pd.Timestamp('2024-03-01'): SPECIAL_CLOSE_0})).empty + assert ( + c.special_closes[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + # removed: pd.Timestamp('2023-03-01'): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-02"): "Inserted Special Close", + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) # Verify adhoc special closes. - assert c.special_closes_adhoc == [(datetime.time(14, 0), pd.DatetimeIndex([pd.Timestamp("2023-04-03")]))] + assert c.special_closes_adhoc == [ + (datetime.time(14, 0), pd.DatetimeIndex([pd.Timestamp("2023-04-03")])) + ] # Verify special opens for 2022, 2023, and 2024. assert len(c.special_opens) == 1 assert len(c.special_opens[0]) == 2 assert c.special_opens[0][0] == datetime.time(11, 0) - assert c.special_opens[0][1].holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-05-02'): SPECIAL_OPEN_0, - # removed pd.Timestamp('2023-05-01'): SPECIAL_OPEN_0, - pd.Timestamp('2023-05-02'): 'Inserted Special Open', - pd.Timestamp('2024-05-01'): SPECIAL_OPEN_0})).empty + assert ( + c.special_opens[0][1] + .holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + # removed pd.Timestamp('2023-05-01'): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-02"): "Inserted Special Open", + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) # Verify adhoc special opens. - assert c.special_opens_adhoc == [(datetime.time(11, 0), pd.DatetimeIndex([pd.Timestamp("2023-06-01")]))] + assert c.special_opens_adhoc == [ + (datetime.time(11, 0), pd.DatetimeIndex([pd.Timestamp("2023-06-01")])) + ] # Verify additional holiday calendars. 
- assert c.holidays_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp("2022-01-01"): HOLIDAY_0, - # removed: pd.Timestamp("2023-01-01"): HOLIDAY_0, - pd.Timestamp("2023-01-02"): INSERTED_HOLIDAY, - pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, - pd.Timestamp("2024-01-01"): HOLIDAY_0})).empty - - assert c.special_closes_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-03-01'): SPECIAL_CLOSE_0, - # removed: pd.Timestamp('2023-03-01'): SPECIAL_CLOSE_0, - pd.Timestamp('2023-03-02'): 'Inserted Special Close', - pd.Timestamp('2023-04-03'): AD_HOC_SPECIAL_CLOSE, - pd.Timestamp('2024-03-01'): SPECIAL_CLOSE_0})).empty - - assert c.special_opens_all.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-05-02'): SPECIAL_OPEN_0, - # removed: pd.Timestamp('2023-05-01'): SPECIAL_OPEN_0, - pd.Timestamp('2023-05-02'): 'Inserted Special Open', - pd.Timestamp('2023-06-01'): AD_HOC_SPECIAL_OPEN, - pd.Timestamp('2024-05-01'): SPECIAL_OPEN_0})).empty - - assert c.quarterly_expiries.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-03-18'): QUARTERLY_EXPIRY, - pd.Timestamp('2022-06-17'): QUARTERLY_EXPIRY, - pd.Timestamp('2022-09-16'): QUARTERLY_EXPIRY, - pd.Timestamp('2022-12-16'): QUARTERLY_EXPIRY, - pd.Timestamp('2023-03-17'): QUARTERLY_EXPIRY, - pd.Timestamp('2023-06-16'): QUARTERLY_EXPIRY, - pd.Timestamp('2023-09-14'): 'Inserted Quarterly Expiry', - # removed: pd.Timestamp('2023-09-15'): QUARTERLY_EXPIRY, - pd.Timestamp('2023-12-15'): QUARTERLY_EXPIRY, - pd.Timestamp('2024-03-15'): QUARTERLY_EXPIRY, - pd.Timestamp('2024-06-21'): QUARTERLY_EXPIRY, - pd.Timestamp('2024-09-20'): QUARTERLY_EXPIRY, - pd.Timestamp('2024-12-20'): QUARTERLY_EXPIRY})).empty - - assert c.monthly_expiries.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-01-21'): MONTHLY_EXPIRY, - pd.Timestamp('2022-02-18'): MONTHLY_EXPIRY, - pd.Timestamp('2022-04-15'): MONTHLY_EXPIRY, - pd.Timestamp('2022-05-20'): MONTHLY_EXPIRY, - pd.Timestamp('2022-07-15'): MONTHLY_EXPIRY, - pd.Timestamp('2022-08-19'): MONTHLY_EXPIRY, - pd.Timestamp('2022-10-21'): MONTHLY_EXPIRY, - pd.Timestamp('2022-11-18'): MONTHLY_EXPIRY, - pd.Timestamp('2023-01-20'): MONTHLY_EXPIRY, - pd.Timestamp('2023-02-17'): MONTHLY_EXPIRY, - pd.Timestamp('2023-04-21'): MONTHLY_EXPIRY, - pd.Timestamp('2023-05-19'): MONTHLY_EXPIRY, - pd.Timestamp('2023-07-21'): MONTHLY_EXPIRY, - pd.Timestamp('2023-08-17'): 'Inserted Monthly Expiry', - # removed: pd.Timestamp('2023-08-18'): MONTHLY_EXPIRY, - pd.Timestamp('2023-10-20'): MONTHLY_EXPIRY, - pd.Timestamp('2023-11-17'): MONTHLY_EXPIRY, - pd.Timestamp('2024-01-19'): MONTHLY_EXPIRY, - pd.Timestamp('2024-02-16'): MONTHLY_EXPIRY, - pd.Timestamp('2024-04-19'): MONTHLY_EXPIRY, - pd.Timestamp('2024-05-17'): MONTHLY_EXPIRY, - pd.Timestamp('2024-07-19'): MONTHLY_EXPIRY, - pd.Timestamp('2024-08-16'): MONTHLY_EXPIRY, - pd.Timestamp('2024-10-18'): MONTHLY_EXPIRY, - pd.Timestamp('2024-11-15'): MONTHLY_EXPIRY})).empty + assert ( + c.holidays_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-01"): HOLIDAY_0, + # removed: pd.Timestamp("2023-01-01"): HOLIDAY_0, + pd.Timestamp("2023-01-02"): INSERTED_HOLIDAY, + pd.Timestamp("2023-02-01"): AD_HOC_HOLIDAY, + pd.Timestamp("2024-01-01"): HOLIDAY_0, + } + ) + ) + .empty + ) + + assert ( + c.special_closes_all.holidays(start=start, end=end, return_name=True) + .compare( + 
pd.Series( + { + pd.Timestamp("2022-03-01"): SPECIAL_CLOSE_0, + # removed: pd.Timestamp('2023-03-01'): SPECIAL_CLOSE_0, + pd.Timestamp("2023-03-02"): "Inserted Special Close", + pd.Timestamp("2023-04-03"): AD_HOC_SPECIAL_CLOSE, + pd.Timestamp("2024-03-01"): SPECIAL_CLOSE_0, + } + ) + ) + .empty + ) + + assert ( + c.special_opens_all.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-05-02"): SPECIAL_OPEN_0, + # removed: pd.Timestamp('2023-05-01'): SPECIAL_OPEN_0, + pd.Timestamp("2023-05-02"): "Inserted Special Open", + pd.Timestamp("2023-06-01"): AD_HOC_SPECIAL_OPEN, + pd.Timestamp("2024-05-01"): SPECIAL_OPEN_0, + } + ) + ) + .empty + ) + + assert ( + c.quarterly_expiries.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-03-18"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-06-17"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-09-16"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-12-16"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-03-17"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-06-16"): QUARTERLY_EXPIRY, + pd.Timestamp("2023-09-14"): "Inserted Quarterly Expiry", + # removed: pd.Timestamp('2023-09-15'): QUARTERLY_EXPIRY, + pd.Timestamp("2023-12-15"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-03-15"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-06-21"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-09-20"): QUARTERLY_EXPIRY, + pd.Timestamp("2024-12-20"): QUARTERLY_EXPIRY, + } + ) + ) + .empty + ) + + assert ( + c.monthly_expiries.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-21"): MONTHLY_EXPIRY, + pd.Timestamp("2022-02-18"): MONTHLY_EXPIRY, + pd.Timestamp("2022-04-15"): MONTHLY_EXPIRY, + pd.Timestamp("2022-05-20"): MONTHLY_EXPIRY, + pd.Timestamp("2022-07-15"): MONTHLY_EXPIRY, + pd.Timestamp("2022-08-19"): MONTHLY_EXPIRY, + pd.Timestamp("2022-10-21"): MONTHLY_EXPIRY, + pd.Timestamp("2022-11-18"): MONTHLY_EXPIRY, + pd.Timestamp("2023-01-20"): MONTHLY_EXPIRY, + pd.Timestamp("2023-02-17"): MONTHLY_EXPIRY, + pd.Timestamp("2023-04-21"): MONTHLY_EXPIRY, + pd.Timestamp("2023-05-19"): MONTHLY_EXPIRY, + pd.Timestamp("2023-07-21"): MONTHLY_EXPIRY, + pd.Timestamp("2023-08-17"): "Inserted Monthly Expiry", + # removed: pd.Timestamp('2023-08-18'): MONTHLY_EXPIRY, + pd.Timestamp("2023-10-20"): MONTHLY_EXPIRY, + pd.Timestamp("2023-11-17"): MONTHLY_EXPIRY, + pd.Timestamp("2024-01-19"): MONTHLY_EXPIRY, + pd.Timestamp("2024-02-16"): MONTHLY_EXPIRY, + pd.Timestamp("2024-04-19"): MONTHLY_EXPIRY, + pd.Timestamp("2024-05-17"): MONTHLY_EXPIRY, + pd.Timestamp("2024-07-19"): MONTHLY_EXPIRY, + pd.Timestamp("2024-08-16"): MONTHLY_EXPIRY, + pd.Timestamp("2024-10-18"): MONTHLY_EXPIRY, + pd.Timestamp("2024-11-15"): MONTHLY_EXPIRY, + } + ) + ) + .empty + ) # Verify tags and comments. 
- - @pytest.mark.isolated def test_test(): import exchange_calendars_extensions.core as ecx + ecx.apply_extensions() import exchange_calendars as ec changes = { - 'add': { - '2022-01-10': {'type': 'holiday', 'name': 'Holiday'}, - '2022-01-12': {'type': 'special_open', 'name': 'Special Open', 'time': '10:00'}, - '2022-01-14': {'type': 'special_close', 'name': 'Special Close', 'time': '16:00'}, - '2022-01-18': {'type': 'monthly_expiry', 'name': MONTHLY_EXPIRY}, - '2022-01-20': {'type': 'quarterly_expiry', 'name': QUARTERLY_EXPIRY} + "add": { + "2022-01-10": {"type": "holiday", "name": "Holiday"}, + "2022-01-12": { + "type": "special_open", + "name": "Special Open", + "time": "10:00", + }, + "2022-01-14": { + "type": "special_close", + "name": "Special Close", + "time": "16:00", + }, + "2022-01-18": {"type": "monthly_expiry", "name": MONTHLY_EXPIRY}, + "2022-01-20": {"type": "quarterly_expiry", "name": QUARTERLY_EXPIRY}, + }, + "remove": [ + "2022-01-11", + "2022-01-13", + "2022-01-17", + "2022-01-19", + "2022-01-21", + ], + "meta": { + "2022-01-22": {"tags": ["tag1", "tag2"]}, + "2022-01-23": {"comment": "This is a comment"}, + "2022-01-24": {"tags": ["tag3", "tag4"], "comment": "This is a comment"}, }, - 'remove': ['2022-01-11', '2022-01-13', '2022-01-17', '2022-01-19', '2022-01-21'], - 'meta': { - '2022-01-22': {'tags': ['tag1', 'tag2']}, - '2022-01-23': {'comment': 'This is a comment'}, - '2022-01-24': {'tags': ['tag3', 'tag4'], 'comment': 'This is a comment'}, - } } - ecx.update_calendar('XLON', changes) + ecx.update_calendar("XLON", changes) - calendar = ec.get_calendar('XLON') + calendar = ec.get_calendar("XLON") - assert '2022-01-10' in calendar.holidays_all.holidays() - assert '2022-01-11' not in calendar.holidays_all.holidays() - assert '2022-01-12' in calendar.special_opens_all.holidays() - assert '2022-01-13' not in calendar.special_opens_all.holidays() - assert '2022-01-14' in calendar.special_closes_all.holidays() - assert '2022-01-17' not in calendar.special_closes_all.holidays() - assert '2022-01-18' in calendar.monthly_expiries.holidays() - assert '2022-01-19' not in calendar.monthly_expiries.holidays() - assert '2022-01-20' in calendar.quarterly_expiries.holidays() - assert '2022-01-21' not in calendar.quarterly_expiries.holidays() + assert "2022-01-10" in calendar.holidays_all.holidays() + assert "2022-01-11" not in calendar.holidays_all.holidays() + assert "2022-01-12" in calendar.special_opens_all.holidays() + assert "2022-01-13" not in calendar.special_opens_all.holidays() + assert "2022-01-14" in calendar.special_closes_all.holidays() + assert "2022-01-17" not in calendar.special_closes_all.holidays() + assert "2022-01-18" in calendar.monthly_expiries.holidays() + assert "2022-01-19" not in calendar.monthly_expiries.holidays() + assert "2022-01-20" in calendar.quarterly_expiries.holidays() + assert "2022-01-21" not in calendar.quarterly_expiries.holidays() @pytest.mark.isolated def test_quarterly_expiry_rollback_one_day(): - add_test_calendar_and_apply_extensions(holidays=[pd.Timestamp("2022-03-18")], - adhoc_holidays=[], - regular_special_close=time(14, 00), - special_closes=[], - adhoc_special_closes=[], - regular_special_open=time(11, 00), - special_opens=[], - adhoc_special_opens=[], - weekmask="1111100", - day_of_week_expiry=4) + add_test_calendar_and_apply_extensions( + holidays=[pd.Timestamp("2022-03-18")], + adhoc_holidays=[], + regular_special_close=time(14, 00), + special_closes=[], + adhoc_special_closes=[], + regular_special_open=time(11, 00), + 
special_opens=[], + adhoc_special_opens=[], + weekmask="1111100", + day_of_week_expiry=4, + ) import exchange_calendars as ec c = ec.get_calendar("TEST") @@ -1827,25 +2887,38 @@ def test_quarterly_expiry_rollback_one_day(): start = pd.Timestamp("2022-01-01") end = pd.Timestamp("2022-12-31") - assert c.quarterly_expiries.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-03-17'): QUARTERLY_EXPIRY, # Should be rolled back from 2022-03-18 since it is a holiday. - pd.Timestamp('2022-06-17'): QUARTERLY_EXPIRY, - pd.Timestamp('2022-09-16'): QUARTERLY_EXPIRY, - pd.Timestamp('2022-12-16'): QUARTERLY_EXPIRY})).empty + assert ( + c.quarterly_expiries.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp( + "2022-03-17" + ): QUARTERLY_EXPIRY, # Should be rolled back from 2022-03-18 since it is a holiday. + pd.Timestamp("2022-06-17"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-09-16"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-12-16"): QUARTERLY_EXPIRY, + } + ) + ) + .empty + ) @pytest.mark.isolated def test_quarterly_expiry_rollback_multiple_days(): - add_test_calendar_and_apply_extensions(holidays=[pd.Timestamp("2022-03-18")], - adhoc_holidays=[pd.Timestamp("2022-03-17")], - regular_special_close=time(14, 00), - special_closes=[(time(14, 00), [pd.Timestamp("2022-03-16")])], - adhoc_special_closes=[(time(14, 00), [pd.Timestamp("2022-03-15")])], - regular_special_open=time(11, 00), - special_opens=[(time(11, 00), [pd.Timestamp("2022-03-14")])], - adhoc_special_opens=[], - weekmask="1111100", - day_of_week_expiry=4) + add_test_calendar_and_apply_extensions( + holidays=[pd.Timestamp("2022-03-18")], + adhoc_holidays=[pd.Timestamp("2022-03-17")], + regular_special_close=time(14, 00), + special_closes=[(time(14, 00), [pd.Timestamp("2022-03-16")])], + adhoc_special_closes=[(time(14, 00), [pd.Timestamp("2022-03-15")])], + regular_special_open=time(11, 00), + special_opens=[(time(11, 00), [pd.Timestamp("2022-03-14")])], + adhoc_special_opens=[], + weekmask="1111100", + day_of_week_expiry=4, + ) import exchange_calendars as ec c = ec.get_calendar("TEST") @@ -1853,25 +2926,38 @@ def test_quarterly_expiry_rollback_multiple_days(): start = pd.Timestamp("2022-01-01") end = pd.Timestamp("2022-12-31") - assert c.quarterly_expiries.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-03-11'): QUARTERLY_EXPIRY, # Should be rolled back from 2022-03-18. - pd.Timestamp('2022-06-17'): QUARTERLY_EXPIRY, - pd.Timestamp('2022-09-16'): QUARTERLY_EXPIRY, - pd.Timestamp('2022-12-16'): QUARTERLY_EXPIRY})).empty + assert ( + c.quarterly_expiries.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp( + "2022-03-11" + ): QUARTERLY_EXPIRY, # Should be rolled back from 2022-03-18. 
+ pd.Timestamp("2022-06-17"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-09-16"): QUARTERLY_EXPIRY, + pd.Timestamp("2022-12-16"): QUARTERLY_EXPIRY, + } + ) + ) + .empty + ) @pytest.mark.isolated def test_monthly_expiry_rollback_one_day(): - add_test_calendar_and_apply_extensions(holidays=[pd.Timestamp("2022-02-18")], - adhoc_holidays=[], - regular_special_close=time(14, 00), - special_closes=[], - adhoc_special_closes=[], - regular_special_open=time(11, 00), - special_opens=[], - adhoc_special_opens=[], - weekmask="1111100", - day_of_week_expiry=4) + add_test_calendar_and_apply_extensions( + holidays=[pd.Timestamp("2022-02-18")], + adhoc_holidays=[], + regular_special_close=time(14, 00), + special_closes=[], + adhoc_special_closes=[], + regular_special_open=time(11, 00), + special_opens=[], + adhoc_special_opens=[], + weekmask="1111100", + day_of_week_expiry=4, + ) import exchange_calendars as ec c = ec.get_calendar("TEST") @@ -1879,29 +2965,42 @@ def test_monthly_expiry_rollback_one_day(): start = pd.Timestamp("2022-01-01") end = pd.Timestamp("2022-12-31") - assert c.monthly_expiries.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-01-21'): MONTHLY_EXPIRY, - pd.Timestamp('2022-02-17'): MONTHLY_EXPIRY, # Should be rolled back from 2022-02-18 since it is a holiday. - pd.Timestamp('2022-04-15'): MONTHLY_EXPIRY, - pd.Timestamp('2022-05-20'): MONTHLY_EXPIRY, - pd.Timestamp('2022-07-15'): MONTHLY_EXPIRY, - pd.Timestamp('2022-08-19'): MONTHLY_EXPIRY, - pd.Timestamp('2022-10-21'): MONTHLY_EXPIRY, - pd.Timestamp('2022-11-18'): MONTHLY_EXPIRY})).empty + assert ( + c.monthly_expiries.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-21"): MONTHLY_EXPIRY, + pd.Timestamp( + "2022-02-17" + ): MONTHLY_EXPIRY, # Should be rolled back from 2022-02-18 since it is a holiday. 
+ pd.Timestamp("2022-04-15"): MONTHLY_EXPIRY, + pd.Timestamp("2022-05-20"): MONTHLY_EXPIRY, + pd.Timestamp("2022-07-15"): MONTHLY_EXPIRY, + pd.Timestamp("2022-08-19"): MONTHLY_EXPIRY, + pd.Timestamp("2022-10-21"): MONTHLY_EXPIRY, + pd.Timestamp("2022-11-18"): MONTHLY_EXPIRY, + } + ) + ) + .empty + ) @pytest.mark.isolated def test_monthly_expiry_rollback_multiple_days(): - add_test_calendar_and_apply_extensions(holidays=[pd.Timestamp("2022-02-18")], - adhoc_holidays=[pd.Timestamp("2022-02-17")], - regular_special_close=time(14, 00), - special_closes=[(time(14, 00), [pd.Timestamp("2022-02-16")])], - adhoc_special_closes=[(time(14, 00), [pd.Timestamp("2022-02-15")])], - regular_special_open=time(11, 00), - special_opens=[(time(11, 00), [pd.Timestamp("2022-02-14")])], - adhoc_special_opens=[], - weekmask="1111100", - day_of_week_expiry=4) + add_test_calendar_and_apply_extensions( + holidays=[pd.Timestamp("2022-02-18")], + adhoc_holidays=[pd.Timestamp("2022-02-17")], + regular_special_close=time(14, 00), + special_closes=[(time(14, 00), [pd.Timestamp("2022-02-16")])], + adhoc_special_closes=[(time(14, 00), [pd.Timestamp("2022-02-15")])], + regular_special_open=time(11, 00), + special_opens=[(time(11, 00), [pd.Timestamp("2022-02-14")])], + adhoc_special_opens=[], + weekmask="1111100", + day_of_week_expiry=4, + ) import exchange_calendars as ec c = ec.get_calendar("TEST") @@ -1909,15 +3008,26 @@ def test_monthly_expiry_rollback_multiple_days(): start = pd.Timestamp("2022-01-01") end = pd.Timestamp("2022-12-31") - assert c.monthly_expiries.holidays(start=start, end=end, return_name=True).compare(pd.Series({ - pd.Timestamp('2022-01-21'): MONTHLY_EXPIRY, - pd.Timestamp('2022-02-11'): MONTHLY_EXPIRY, # Should be rolled back from 2022-02-18. - pd.Timestamp('2022-04-15'): MONTHLY_EXPIRY, - pd.Timestamp('2022-05-20'): MONTHLY_EXPIRY, - pd.Timestamp('2022-07-15'): MONTHLY_EXPIRY, - pd.Timestamp('2022-08-19'): MONTHLY_EXPIRY, - pd.Timestamp('2022-10-21'): MONTHLY_EXPIRY, - pd.Timestamp('2022-11-18'): MONTHLY_EXPIRY})).empty + assert ( + c.monthly_expiries.holidays(start=start, end=end, return_name=True) + .compare( + pd.Series( + { + pd.Timestamp("2022-01-21"): MONTHLY_EXPIRY, + pd.Timestamp( + "2022-02-11" + ): MONTHLY_EXPIRY, # Should be rolled back from 2022-02-18. 
+ pd.Timestamp("2022-04-15"): MONTHLY_EXPIRY, + pd.Timestamp("2022-05-20"): MONTHLY_EXPIRY, + pd.Timestamp("2022-07-15"): MONTHLY_EXPIRY, + pd.Timestamp("2022-08-19"): MONTHLY_EXPIRY, + pd.Timestamp("2022-10-21"): MONTHLY_EXPIRY, + pd.Timestamp("2022-11-18"): MONTHLY_EXPIRY, + } + ) + ) + .empty + ) @pytest.mark.isolated @@ -1931,7 +3041,9 @@ def test_set_tags(): c = ec.get_calendar("TEST") - assert c.meta() == {pd.Timestamp("2023-01-03"): DayMeta(tags=["tag1", "tag2"], comment=None)} + assert c.meta() == { + pd.Timestamp("2023-01-03"): DayMeta(tags=["tag1", "tag2"], comment=None) + } ecx.set_tags("TEST", "2023-01-03", None) @@ -1957,7 +3069,9 @@ def test_set_comment(): c = ec.get_calendar("TEST") - assert c.meta() == {pd.Timestamp("2023-01-03"): DayMeta(tags=[], comment="This is a comment")} + assert c.meta() == { + pd.Timestamp("2023-01-03"): DayMeta(tags=[], comment="This is a comment") + } ecx.set_comment("TEST", "2023-01-03", None) @@ -1979,9 +3093,11 @@ def test_set_meta(): import exchange_calendars as ec import exchange_calendars_extensions.core as ecx - ecx.set_meta("TEST", "2023-01-03", {'comment': 'This is a comment'}) - ecx.set_meta("TEST", "2023-01-04", {'tags': ['tag1', 'tag2']}) - ecx.set_meta("TEST", "2023-01-05", {'tags': ['tag1', 'tag2'], 'comment': 'This is a comment'}) + ecx.set_meta("TEST", "2023-01-03", {"comment": "This is a comment"}) + ecx.set_meta("TEST", "2023-01-04", {"tags": ["tag1", "tag2"]}) + ecx.set_meta( + "TEST", "2023-01-05", {"tags": ["tag1", "tag2"], "comment": "This is a comment"} + ) ecx.set_meta("TEST", "2023-01-06", None) c = ec.get_calendar("TEST") @@ -1989,7 +3105,9 @@ def test_set_meta(): assert c.meta() == { pd.Timestamp("2023-01-03"): DayMeta(tags=[], comment="This is a comment"), pd.Timestamp("2023-01-04"): DayMeta(tags=["tag1", "tag2"], comment=None), - pd.Timestamp("2023-01-05"): DayMeta(tags=["tag1", "tag2"], comment="This is a comment") + pd.Timestamp("2023-01-05"): DayMeta( + tags=["tag1", "tag2"], comment="This is a comment" + ), } ecx.set_meta("TEST", "2023-01-03", None) @@ -2008,16 +3126,20 @@ def test_get_meta(): import exchange_calendars as ec import exchange_calendars_extensions.core as ecx - ecx.set_meta("TEST", "2023-01-03", {'comment': 'This is a comment'}) - ecx.set_meta("TEST", "2023-01-04", {'tags': ['tag1', 'tag2']}) - ecx.set_meta("TEST", "2023-01-05", {'tags': ['tag1', 'tag2'], 'comment': 'This is a comment'}) + ecx.set_meta("TEST", "2023-01-03", {"comment": "This is a comment"}) + ecx.set_meta("TEST", "2023-01-04", {"tags": ["tag1", "tag2"]}) + ecx.set_meta( + "TEST", "2023-01-05", {"tags": ["tag1", "tag2"], "comment": "This is a comment"} + ) c = ec.get_calendar("TEST") assert c.meta() == { pd.Timestamp("2023-01-03"): DayMeta(tags=[], comment="This is a comment"), pd.Timestamp("2023-01-04"): DayMeta(tags=["tag1", "tag2"], comment=None), - pd.Timestamp("2023-01-05"): DayMeta(tags=["tag1", "tag2"], comment="This is a comment") + pd.Timestamp("2023-01-05"): DayMeta( + tags=["tag1", "tag2"], comment="This is a comment" + ), } @@ -2045,40 +3167,82 @@ def test_get_meta_tz_naive(): # Test combinations of start and end aligned with date boundary. 
# start - assert c.meta(start=day_1 - pd.Timedelta(days=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) - assert c.meta(start=day_1) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(start=day_1 - pd.Timedelta(days=1)) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)] + ) + assert c.meta(start=day_1) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)] + ) assert c.meta(start=day_2) == OrderedDict([(day_2, meta_2), (day_3, meta_3)]) assert c.meta(start=day_3) == OrderedDict([(day_3, meta_3)]) assert c.meta(start=day_3 + pd.Timedelta(days=1)) == OrderedDict() # end - assert c.meta(end=day_3 + pd.Timedelta(days=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) - assert c.meta(end=day_3) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(end=day_3 + pd.Timedelta(days=1)) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)] + ) + assert c.meta(end=day_3) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)] + ) assert c.meta(end=day_2) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) assert c.meta(end=day_1) == OrderedDict([(day_1, meta_1)]) assert c.meta(end=day_1 - pd.Timedelta(days=1)) == OrderedDict() # start & end - assert c.meta(start=day_1, end=day_3) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(start=day_1, end=day_3) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)] + ) assert c.meta(start=day_2, end=day_2) == OrderedDict([(day_2, meta_2)]) # Test combinations of start and end not aligned with date boundary. # start - assert c.meta(start=day_1 - pd.Timedelta(hours=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) - assert c.meta(start=day_1 + pd.Timedelta(hours=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) - assert c.meta(start=day_1 + pd.Timedelta(hours=23, minutes=59, seconds=59, milliseconds=999, microseconds=999, nanoseconds=999)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) - assert c.meta(start=day_1 + pd.Timedelta(days=1) - pd.Timedelta(nanoseconds=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) - assert c.meta(start=day_1 + pd.Timedelta(hours=24)) == OrderedDict([(day_2, meta_2), (day_3, meta_3)]) - assert c.meta(start=day_1 + pd.Timedelta(days=1)) == OrderedDict([(day_2, meta_2), (day_3, meta_3)]) + assert c.meta(start=day_1 - pd.Timedelta(hours=1)) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)] + ) + assert c.meta(start=day_1 + pd.Timedelta(hours=1)) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)] + ) + assert c.meta( + start=day_1 + + pd.Timedelta( + hours=23, + minutes=59, + seconds=59, + milliseconds=999, + microseconds=999, + nanoseconds=999, + ) + ) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta( + start=day_1 + pd.Timedelta(days=1) - pd.Timedelta(nanoseconds=1) + ) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(start=day_1 + pd.Timedelta(hours=24)) == OrderedDict( + [(day_2, meta_2), (day_3, meta_3)] + ) + assert c.meta(start=day_1 + pd.Timedelta(days=1)) == OrderedDict( + [(day_2, meta_2), (day_3, meta_3)] + ) # end - assert c.meta(end=day_3 + pd.Timedelta(hours=24)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) - assert c.meta(end=day_3 + pd.Timedelta(days=1)) == OrderedDict([(day_1, meta_1), 
(day_2, meta_2), (day_3, meta_3)]) - assert c.meta(end=day_3 + pd.Timedelta(hours=1) - pd.Timedelta(nanoseconds=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) - assert c.meta(end=day_3 + pd.Timedelta(nanoseconds=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) - assert c.meta(end=day_3) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) - assert c.meta(end=day_3 - pd.Timedelta(nanoseconds=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(end=day_3 + pd.Timedelta(hours=24)) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)] + ) + assert c.meta(end=day_3 + pd.Timedelta(days=1)) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)] + ) + assert c.meta( + end=day_3 + pd.Timedelta(hours=1) - pd.Timedelta(nanoseconds=1) + ) == OrderedDict([(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)]) + assert c.meta(end=day_3 + pd.Timedelta(nanoseconds=1)) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)] + ) + assert c.meta(end=day_3) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2), (day_3, meta_3)] + ) + assert c.meta(end=day_3 - pd.Timedelta(nanoseconds=1)) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2)] + ) def test_get_meta_tz_aware(): @@ -2108,32 +3272,115 @@ def test_get_meta_tz_aware(): # day_1 only has 23 hours due to DST transition # start in timezone CET, same as the calendar - assert c.meta(start=day_1.tz_localize(tz='CET')) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(start=day_1.tz_localize(tz='CET') + pd.Timedelta(days=1) - pd.Timedelta(hours=1, nanoseconds=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(start=day_1.tz_localize(tz='CET') + pd.Timedelta(hours=22, minutes=59, seconds=59, milliseconds=999, microseconds=999, nanoseconds=999)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(start=day_1.tz_localize(tz='CET') + pd.Timedelta(days=1) - pd.Timedelta(hours=1)) == OrderedDict([(day_2, meta_2)]) - assert c.meta(start=day_1.tz_localize(tz='CET') + pd.Timedelta(hours=23)) == OrderedDict([(day_2, meta_2)]) + assert c.meta(start=day_1.tz_localize(tz="CET")) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2)] + ) + assert c.meta( + start=day_1.tz_localize(tz="CET") + + pd.Timedelta(days=1) + - pd.Timedelta(hours=1, nanoseconds=1) + ) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta( + start=day_1.tz_localize(tz="CET") + + pd.Timedelta( + hours=22, + minutes=59, + seconds=59, + milliseconds=999, + microseconds=999, + nanoseconds=999, + ) + ) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta( + start=day_1.tz_localize(tz="CET") + pd.Timedelta(days=1) - pd.Timedelta(hours=1) + ) == OrderedDict([(day_2, meta_2)]) + assert c.meta( + start=day_1.tz_localize(tz="CET") + pd.Timedelta(hours=23) + ) == OrderedDict([(day_2, meta_2)]) # start in UTC - assert c.meta(start=day_1.tz_localize(tz='CET').tz_convert(tz='UTC')) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(start=day_1.tz_localize(tz='CET').tz_convert(tz='UTC') + pd.Timedelta(days=1) - pd.Timedelta(hours=1, nanoseconds=1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(start=day_1.tz_localize(tz='CET').tz_convert(tz='UTC') + pd.Timedelta(hours=22, minutes=59, seconds=59, milliseconds=999, microseconds=999, nanoseconds=999)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(start=day_1.tz_localize(tz='CET').tz_convert(tz='UTC') + 
pd.Timedelta(days=1) - pd.Timedelta(hours=1)) == OrderedDict([(day_2, meta_2)]) - assert c.meta(start=day_1.tz_localize(tz='CET').tz_convert(tz='UTC') + pd.Timedelta(hours=23)) == OrderedDict([(day_2, meta_2)]) + assert c.meta( + start=day_1.tz_localize(tz="CET").tz_convert(tz="UTC") + ) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta( + start=day_1.tz_localize(tz="CET").tz_convert(tz="UTC") + + pd.Timedelta(days=1) + - pd.Timedelta(hours=1, nanoseconds=1) + ) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta( + start=day_1.tz_localize(tz="CET").tz_convert(tz="UTC") + + pd.Timedelta( + hours=22, + minutes=59, + seconds=59, + milliseconds=999, + microseconds=999, + nanoseconds=999, + ) + ) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta( + start=day_1.tz_localize(tz="CET").tz_convert(tz="UTC") + + pd.Timedelta(days=1) + - pd.Timedelta(hours=1) + ) == OrderedDict([(day_2, meta_2)]) + assert c.meta( + start=day_1.tz_localize(tz="CET").tz_convert(tz="UTC") + pd.Timedelta(hours=23) + ) == OrderedDict([(day_2, meta_2)]) # end # day_2 has 25 hours due to DST transition # end in timezone CET, same as the calendar - assert c.meta(end=day_2.tz_localize(tz='CET') + pd.Timedelta(hours=25)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(end=day_2.tz_localize(tz='CET') + pd.Timedelta(hours=24, minutes=59, seconds=59, milliseconds=999, microseconds=999, nanoseconds=999)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(end=day_2.tz_localize(tz='CET') + pd.Timedelta(days=1) + pd.Timedelta(hours=1, nanoseconds=-1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(end=day_2.tz_localize(tz='CET')) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(end=day_2.tz_localize(tz='CET') - pd.Timedelta(nanoseconds=1)) == OrderedDict([(day_1, meta_1)]) + assert c.meta( + end=day_2.tz_localize(tz="CET") + pd.Timedelta(hours=25) + ) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta( + end=day_2.tz_localize(tz="CET") + + pd.Timedelta( + hours=24, + minutes=59, + seconds=59, + milliseconds=999, + microseconds=999, + nanoseconds=999, + ) + ) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta( + end=day_2.tz_localize(tz="CET") + + pd.Timedelta(days=1) + + pd.Timedelta(hours=1, nanoseconds=-1) + ) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(end=day_2.tz_localize(tz="CET")) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2)] + ) + assert c.meta( + end=day_2.tz_localize(tz="CET") - pd.Timedelta(nanoseconds=1) + ) == OrderedDict([(day_1, meta_1)]) # end in UTC - assert c.meta(end=day_2.tz_localize(tz='CET').tz_convert(tz='UTC') + pd.Timedelta(hours=25)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(end=day_2.tz_localize(tz='CET').tz_convert(tz='UTC') + pd.Timedelta(hours=24, minutes=59, seconds=59, milliseconds=999, microseconds=999, nanoseconds=999)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(end=day_2.tz_localize(tz='CET').tz_convert(tz='UTC') + pd.Timedelta(days=1) + pd.Timedelta(hours=1, nanoseconds=-1)) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(end=day_2.tz_localize(tz='CET').tz_convert(tz='UTC')) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) - assert c.meta(end=day_2.tz_localize(tz='CET').tz_convert(tz='UTC') - pd.Timedelta(nanoseconds=1)) == OrderedDict([(day_1, meta_1)]) + assert c.meta( + end=day_2.tz_localize(tz="CET").tz_convert(tz="UTC") + pd.Timedelta(hours=25) 
+ ) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta( + end=day_2.tz_localize(tz="CET").tz_convert(tz="UTC") + + pd.Timedelta( + hours=24, + minutes=59, + seconds=59, + milliseconds=999, + microseconds=999, + nanoseconds=999, + ) + ) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta( + end=day_2.tz_localize(tz="CET").tz_convert(tz="UTC") + + pd.Timedelta(days=1) + + pd.Timedelta(hours=1, nanoseconds=-1) + ) == OrderedDict([(day_1, meta_1), (day_2, meta_2)]) + assert c.meta(end=day_2.tz_localize(tz="CET").tz_convert(tz="UTC")) == OrderedDict( + [(day_1, meta_1), (day_2, meta_2)] + ) + assert c.meta( + end=day_2.tz_localize(tz="CET").tz_convert(tz="UTC") + - pd.Timedelta(nanoseconds=1) + ) == OrderedDict([(day_1, meta_1)]) diff --git a/tests/test_holiday.py b/tests/test_holiday.py index 42aae00..f784222 100644 --- a/tests/test_holiday.py +++ b/tests/test_holiday.py @@ -1,71 +1,183 @@ import pandas as pd from pandas import DatetimeIndex, Series -from exchange_calendars_extensions.core.holiday import get_monthly_expiry_holiday, DayOfWeekPeriodicHoliday +from exchange_calendars_extensions.core.holiday import ( + get_monthly_expiry_holiday, + DayOfWeekPeriodicHoliday, +) -DATETIME_64_NS = 'datetime64[ns]' +DATETIME_64_NS = "datetime64[ns]" class TestHolidays: - def test_get_monthly_expiry_holiday(self): - holiday = get_monthly_expiry_holiday(name="Holiday", day_of_week=0, month=1, start_date=pd.Timestamp("2019-01-01"), end_date=pd.Timestamp("2021-12-31")) + holiday = get_monthly_expiry_holiday( + name="Holiday", + day_of_week=0, + month=1, + start_date=pd.Timestamp("2019-01-01"), + end_date=pd.Timestamp("2021-12-31"), + ) # Verify name. assert holiday.name == "Holiday" # No holidays in 2018 because it's before the start date. - assert holiday.dates(start_date=pd.Timestamp("2018-01-01"), end_date=pd.Timestamp("2018-12-31"), return_name=False).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) + assert holiday.dates( + start_date=pd.Timestamp("2018-01-01"), + end_date=pd.Timestamp("2018-12-31"), + return_name=False, + ).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) # No holidays since the end date is before the first holiday. - assert holiday.dates(start_date=pd.Timestamp("2019-01-01"), end_date=pd.Timestamp("2019-01-20"), return_name=False).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) + assert holiday.dates( + start_date=pd.Timestamp("2019-01-01"), + end_date=pd.Timestamp("2019-01-20"), + return_name=False, + ).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) # A single holiday in 2019. - assert holiday.dates(start_date=pd.Timestamp("2019-01-01"), end_date=pd.Timestamp("2019-12-31"), return_name=False).equals(DatetimeIndex([pd.Timestamp("2019-01-21")], dtype=DATETIME_64_NS, freq=None)) + assert holiday.dates( + start_date=pd.Timestamp("2019-01-01"), + end_date=pd.Timestamp("2019-12-31"), + return_name=False, + ).equals( + DatetimeIndex([pd.Timestamp("2019-01-21")], dtype=DATETIME_64_NS, freq=None) + ) # A single holiday in 2020. - assert holiday.dates(start_date=pd.Timestamp("2020-01-01"), end_date=pd.Timestamp("2020-12-31"), return_name=False).equals(DatetimeIndex([pd.Timestamp("2020-01-20")], dtype=DATETIME_64_NS, freq=None)) + assert holiday.dates( + start_date=pd.Timestamp("2020-01-01"), + end_date=pd.Timestamp("2020-12-31"), + return_name=False, + ).equals( + DatetimeIndex([pd.Timestamp("2020-01-20")], dtype=DATETIME_64_NS, freq=None) + ) # A single holiday in 2021. 
- assert holiday.dates(start_date=pd.Timestamp("2021-01-01"), end_date=pd.Timestamp("2021-12-31"), return_name=False).equals(DatetimeIndex([pd.Timestamp("2021-01-18")], dtype=DATETIME_64_NS, freq=None)) + assert holiday.dates( + start_date=pd.Timestamp("2021-01-01"), + end_date=pd.Timestamp("2021-12-31"), + return_name=False, + ).equals( + DatetimeIndex([pd.Timestamp("2021-01-18")], dtype=DATETIME_64_NS, freq=None) + ) # No holidays since the start date is after the last holiday. - assert holiday.dates(start_date=pd.Timestamp("2021-01-19"), end_date=pd.Timestamp("2021-12-31"), return_name=False).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) + assert holiday.dates( + start_date=pd.Timestamp("2021-01-19"), + end_date=pd.Timestamp("2021-12-31"), + return_name=False, + ).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) # No holidays in 2022 because it's after the end date. - assert holiday.dates(start_date=pd.Timestamp("2022-01-01"), end_date=pd.Timestamp("2022-12-31"), return_name=False).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) + assert holiday.dates( + start_date=pd.Timestamp("2022-01-01"), + end_date=pd.Timestamp("2022-12-31"), + return_name=False, + ).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) # Verify that the holiday name is returned. - assert holiday.dates(start_date=pd.Timestamp("2019-01-01"), end_date=pd.Timestamp("2019-12-31"), return_name=True).equals(Series(['Holiday'], index=[pd.Timestamp('2019-01-21')])) + assert holiday.dates( + start_date=pd.Timestamp("2019-01-01"), + end_date=pd.Timestamp("2019-12-31"), + return_name=True, + ).equals(Series(["Holiday"], index=[pd.Timestamp("2019-01-21")])) def test_day_of_week_periodic_holiday(self): - holiday = DayOfWeekPeriodicHoliday(name="Holiday", day_of_week=0, start_date=pd.Timestamp("2019-01-01"), end_date=pd.Timestamp("2021-12-31")) + holiday = DayOfWeekPeriodicHoliday( + name="Holiday", + day_of_week=0, + start_date=pd.Timestamp("2019-01-01"), + end_date=pd.Timestamp("2021-12-31"), + ) # Verify name. assert holiday.name == "Holiday" # No holidays in 2018 because it's before the start date. - assert holiday.dates(start_date=pd.Timestamp("2018-01-01"), end_date=pd.Timestamp("2018-12-31"), return_name=False).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) + assert holiday.dates( + start_date=pd.Timestamp("2018-01-01"), + end_date=pd.Timestamp("2018-12-31"), + return_name=False, + ).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) # No holidays since the end date is before the first holiday. - assert holiday.dates(start_date=pd.Timestamp("2019-01-01"), end_date=pd.Timestamp("2019-01-06"), return_name=False).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) + assert holiday.dates( + start_date=pd.Timestamp("2019-01-01"), + end_date=pd.Timestamp("2019-01-06"), + return_name=False, + ).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) # No holidays since the provided range doesn't include the day of week. - assert holiday.dates(start_date=pd.Timestamp("2019-01-03"), end_date=pd.Timestamp("2019-01-05"), return_name=False).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) + assert holiday.dates( + start_date=pd.Timestamp("2019-01-03"), + end_date=pd.Timestamp("2019-01-05"), + return_name=False, + ).equals(DatetimeIndex([], dtype=DATETIME_64_NS, freq=None)) # Verify expected days of week in 2019. 
- assert holiday.dates(start_date=pd.Timestamp("2019-01-01"), end_date=pd.Timestamp("2019-12-31"), return_name=False).equals(DatetimeIndex([ - pd.Timestamp("2019-01-07"), pd.Timestamp("2019-01-14"), pd.Timestamp("2019-01-21"), pd.Timestamp("2019-01-28"), - pd.Timestamp("2019-02-04"), pd.Timestamp("2019-02-11"), pd.Timestamp("2019-02-18"), pd.Timestamp("2019-02-25"), - pd.Timestamp("2019-03-04"), pd.Timestamp("2019-03-11"), pd.Timestamp("2019-03-18"), pd.Timestamp("2019-03-25"), - pd.Timestamp("2019-04-01"), pd.Timestamp("2019-04-08"), pd.Timestamp("2019-04-15"), pd.Timestamp("2019-04-22"), - pd.Timestamp("2019-04-29"), pd.Timestamp("2019-05-06"), pd.Timestamp("2019-05-13"), pd.Timestamp("2019-05-20"), - pd.Timestamp("2019-05-27"), pd.Timestamp("2019-06-03"), pd.Timestamp("2019-06-10"), pd.Timestamp("2019-06-17"), - pd.Timestamp("2019-06-24"), pd.Timestamp("2019-07-01"), pd.Timestamp("2019-07-08"), pd.Timestamp("2019-07-15"), - pd.Timestamp("2019-07-22"), pd.Timestamp("2019-07-29"), pd.Timestamp("2019-08-05"), pd.Timestamp("2019-08-12"), - pd.Timestamp("2019-08-19"), pd.Timestamp("2019-08-26"), pd.Timestamp("2019-09-02"), pd.Timestamp("2019-09-09"), - pd.Timestamp("2019-09-16"), pd.Timestamp("2019-09-23"), pd.Timestamp("2019-09-30"), pd.Timestamp("2019-10-07"), - pd.Timestamp("2019-10-14"), pd.Timestamp("2019-10-21"), pd.Timestamp("2019-10-28"), pd.Timestamp("2019-11-04"), - pd.Timestamp("2019-11-11"), pd.Timestamp("2019-11-18"), pd.Timestamp("2019-11-25"), pd.Timestamp("2019-12-02"), - pd.Timestamp("2019-12-09"), pd.Timestamp("2019-12-16"), pd.Timestamp("2019-12-23"), pd.Timestamp("2019-12-30"), - ], dtype='datetime64[ns]', freq=None)) + assert holiday.dates( + start_date=pd.Timestamp("2019-01-01"), + end_date=pd.Timestamp("2019-12-31"), + return_name=False, + ).equals( + DatetimeIndex( + [ + pd.Timestamp("2019-01-07"), + pd.Timestamp("2019-01-14"), + pd.Timestamp("2019-01-21"), + pd.Timestamp("2019-01-28"), + pd.Timestamp("2019-02-04"), + pd.Timestamp("2019-02-11"), + pd.Timestamp("2019-02-18"), + pd.Timestamp("2019-02-25"), + pd.Timestamp("2019-03-04"), + pd.Timestamp("2019-03-11"), + pd.Timestamp("2019-03-18"), + pd.Timestamp("2019-03-25"), + pd.Timestamp("2019-04-01"), + pd.Timestamp("2019-04-08"), + pd.Timestamp("2019-04-15"), + pd.Timestamp("2019-04-22"), + pd.Timestamp("2019-04-29"), + pd.Timestamp("2019-05-06"), + pd.Timestamp("2019-05-13"), + pd.Timestamp("2019-05-20"), + pd.Timestamp("2019-05-27"), + pd.Timestamp("2019-06-03"), + pd.Timestamp("2019-06-10"), + pd.Timestamp("2019-06-17"), + pd.Timestamp("2019-06-24"), + pd.Timestamp("2019-07-01"), + pd.Timestamp("2019-07-08"), + pd.Timestamp("2019-07-15"), + pd.Timestamp("2019-07-22"), + pd.Timestamp("2019-07-29"), + pd.Timestamp("2019-08-05"), + pd.Timestamp("2019-08-12"), + pd.Timestamp("2019-08-19"), + pd.Timestamp("2019-08-26"), + pd.Timestamp("2019-09-02"), + pd.Timestamp("2019-09-09"), + pd.Timestamp("2019-09-16"), + pd.Timestamp("2019-09-23"), + pd.Timestamp("2019-09-30"), + pd.Timestamp("2019-10-07"), + pd.Timestamp("2019-10-14"), + pd.Timestamp("2019-10-21"), + pd.Timestamp("2019-10-28"), + pd.Timestamp("2019-11-04"), + pd.Timestamp("2019-11-11"), + pd.Timestamp("2019-11-18"), + pd.Timestamp("2019-11-25"), + pd.Timestamp("2019-12-02"), + pd.Timestamp("2019-12-09"), + pd.Timestamp("2019-12-16"), + pd.Timestamp("2019-12-23"), + pd.Timestamp("2019-12-30"), + ], + dtype="datetime64[ns]", + freq=None, + ) + ) diff --git a/tests/test_holiday_calendar.py b/tests/test_holiday_calendar.py index adbbfbe..7c730e9 100644 --- 
a/tests/test_holiday_calendar.py +++ b/tests/test_holiday_calendar.py @@ -4,15 +4,28 @@ import pytest from exchange_calendars import get_calendar, ExchangeCalendar from exchange_calendars.exchange_calendar import HolidayCalendar -from exchange_calendars.exchange_calendar import HolidayCalendar as ExchangeCalendarsHolidayCalendar +from exchange_calendars.exchange_calendar import ( + HolidayCalendar as ExchangeCalendarsHolidayCalendar, +) from exchange_calendars.exchange_calendar_xlon import ChristmasEve, NewYearsEvePost2000 from exchange_calendars.pandas_extensions.holiday import Holiday from pytz import timezone -from exchange_calendars_extensions.core.holiday_calendar import get_holiday_calendar_from_timestamps, \ - get_holiday_calendar_from_day_of_week, merge_calendars, get_holidays_calendar, get_special_closes_calendar, \ - get_special_opens_calendar, get_weekend_days_calendar, get_monthly_expiry_rules, get_quadruple_witching_rules, \ - get_last_day_of_month_rules, roll_one_day_same_month, AdjustedHolidayCalendar, RollFn +from exchange_calendars_extensions.core.holiday_calendar import ( + get_holiday_calendar_from_timestamps, + get_holiday_calendar_from_day_of_week, + merge_calendars, + get_holidays_calendar, + get_special_closes_calendar, + get_special_opens_calendar, + get_weekend_days_calendar, + get_monthly_expiry_rules, + get_quadruple_witching_rules, + get_last_day_of_month_rules, + roll_one_day_same_month, + AdjustedHolidayCalendar, + RollFn, +) from tests.util import date2args, roll_backward, roll_forward import tests.util @@ -27,45 +40,63 @@ class TestRollOneDaySameMonth: - - @pytest.mark.parametrize("date", [ - pd.Timestamp("2020-01-02"), - pd.Timestamp("2020-01-03"), - pd.Timestamp("2020-01-04")]) + @pytest.mark.parametrize( + "date", + [ + pd.Timestamp("2020-01-02"), + pd.Timestamp("2020-01-03"), + pd.Timestamp("2020-01-04"), + ], + ) def test_same_month(self, date: pd.Timestamp): print(date) print(roll_one_day_same_month(date)) print(date - pd.Timedelta(days=1)) assert roll_one_day_same_month(date) == date - pd.Timedelta(days=1) - @pytest.mark.parametrize("date", [ - pd.Timestamp("2020-01-01"), - pd.Timestamp("2020-02-01"), - pd.Timestamp("2020-03-01")]) + @pytest.mark.parametrize( + "date", + [ + pd.Timestamp("2020-01-01"), + pd.Timestamp("2020-02-01"), + pd.Timestamp("2020-03-01"), + ], + ) def test_not_same_month(self, date: pd.Timestamp): assert roll_one_day_same_month(date) is None class TestAdjustedHolidayCalendar: - - @pytest.mark.parametrize("return_name", [False, True], ids=["return_name=False", "return_name=True"]) - @pytest.mark.parametrize("weekmask, day, day_adjusted, roll_fn", [ - ("1111100", "2024-01-15", "2024-01-15", roll_backward), # Mon - ("1111100", "2024-01-16", "2024-01-16", roll_backward), # Tue - ("1111100", "2024-01-17", "2024-01-17", roll_backward), # Wed - ("1111100", "2024-01-18", "2024-01-18", roll_backward), # Thu - ("1111100", "2024-01-19", "2024-01-19", roll_backward), # Fri - ("1111100", "2024-01-20", "2024-01-19", roll_backward), # Sat - ("1111100", "2024-01-21", "2024-01-19", roll_backward), # Sun - ("1111100", "2024-01-15", "2024-01-15", roll_forward), # Mon - ("1111100", "2024-01-16", "2024-01-16", roll_forward), # Tue - ("1111100", "2024-01-17", "2024-01-17", roll_forward), # Wed - ("1111100", "2024-01-18", "2024-01-18", roll_forward), # Thu - ("1111100", "2024-01-19", "2024-01-19", roll_forward), # Fri - ("1111100", "2024-01-20", "2024-01-22", roll_forward), # Sat - ("1111100", "2024-01-21", "2024-01-22", roll_forward), # Sun - ]) - 
def test_weekmask(self, weekmask: str, day: str, day_adjusted: str, roll_fn: RollFn, return_name: bool): + @pytest.mark.parametrize( + "return_name", [False, True], ids=["return_name=False", "return_name=True"] + ) + @pytest.mark.parametrize( + "weekmask, day, day_adjusted, roll_fn", + [ + ("1111100", "2024-01-15", "2024-01-15", roll_backward), # Mon + ("1111100", "2024-01-16", "2024-01-16", roll_backward), # Tue + ("1111100", "2024-01-17", "2024-01-17", roll_backward), # Wed + ("1111100", "2024-01-18", "2024-01-18", roll_backward), # Thu + ("1111100", "2024-01-19", "2024-01-19", roll_backward), # Fri + ("1111100", "2024-01-20", "2024-01-19", roll_backward), # Sat + ("1111100", "2024-01-21", "2024-01-19", roll_backward), # Sun + ("1111100", "2024-01-15", "2024-01-15", roll_forward), # Mon + ("1111100", "2024-01-16", "2024-01-16", roll_forward), # Tue + ("1111100", "2024-01-17", "2024-01-17", roll_forward), # Wed + ("1111100", "2024-01-18", "2024-01-18", roll_forward), # Thu + ("1111100", "2024-01-19", "2024-01-19", roll_forward), # Fri + ("1111100", "2024-01-20", "2024-01-22", roll_forward), # Sat + ("1111100", "2024-01-21", "2024-01-22", roll_forward), # Sun + ], + ) + def test_weekmask( + self, + weekmask: str, + day: str, + day_adjusted: str, + roll_fn: RollFn, + return_name: bool, + ): """Test that the weekmask is applied correctly when adjusting the holidays.""" # Unadjusted holiday. @@ -76,24 +107,85 @@ def test_weekmask(self, weekmask: str, day: str, day_adjusted: str, roll_fn: Rol # Calendar containing just the single holiday, the given weekmask, and using the given roll function. The other # calendar is empty. - calendar = AdjustedHolidayCalendar(rules=[ - Holiday(name=HOLIDAY, **date2args(day)), - ], other=ExchangeCalendarsHolidayCalendar([]), weekmask=weekmask, roll_fn=roll_fn) + calendar = AdjustedHolidayCalendar( + rules=[ + Holiday(name=HOLIDAY, **date2args(day)), + ], + other=ExchangeCalendarsHolidayCalendar([]), + weekmask=weekmask, + roll_fn=roll_fn, + ) # Check that holiday is adjusted to the expected day. assert calendar.holidays(return_name=return_name).equals( - pd.Series([HOLIDAY, ], index=[day_adjusted, ]) if return_name else pd.DatetimeIndex([day_adjusted, ])) - - @pytest.mark.parametrize("return_name", [False, True], ids=["return_name=False", "return_name=True"]) - @pytest.mark.parametrize("day, day_adjusted, day_other, roll_fn", [ - ("2024-01-15", "2024-01-15", "2024-01-16", roll_backward), # Day after holiday is a holiday. No adjustment. - ("2024-01-16", "2024-01-15", "2024-01-16", roll_backward), # Day coincides with other holiday. Adjust. - ("2024-01-17", "2024-01-17", "2024-01-16", roll_backward), # Day before holiday is a holiday. No adjustment. - ("2024-01-15", "2024-01-15", "2024-01-16", roll_forward), # Day after holiday is a holiday. No adjustment. - ("2024-01-16", "2024-01-17", "2024-01-16", roll_forward), # Day coincides with other holiday. Adjust. - ("2024-01-17", "2024-01-17", "2024-01-16", roll_forward), # Day before holiday is a holiday. No adjustment. 
- ]) - def test_other_calendar(self, day: str, day_adjusted: str, day_other: str, roll_fn: RollFn, return_name: bool): + pd.Series( + [ + HOLIDAY, + ], + index=[ + day_adjusted, + ], + ) + if return_name + else pd.DatetimeIndex( + [ + day_adjusted, + ] + ) + ) + + @pytest.mark.parametrize( + "return_name", [False, True], ids=["return_name=False", "return_name=True"] + ) + @pytest.mark.parametrize( + "day, day_adjusted, day_other, roll_fn", + [ + ( + "2024-01-15", + "2024-01-15", + "2024-01-16", + roll_backward, + ), # Day after holiday is a holiday. No adjustment. + ( + "2024-01-16", + "2024-01-15", + "2024-01-16", + roll_backward, + ), # Day coincides with other holiday. Adjust. + ( + "2024-01-17", + "2024-01-17", + "2024-01-16", + roll_backward, + ), # Day before holiday is a holiday. No adjustment. + ( + "2024-01-15", + "2024-01-15", + "2024-01-16", + roll_forward, + ), # Day after holiday is a holiday. No adjustment. + ( + "2024-01-16", + "2024-01-17", + "2024-01-16", + roll_forward, + ), # Day coincides with other holiday. Adjust. + ( + "2024-01-17", + "2024-01-17", + "2024-01-16", + roll_forward, + ), # Day before holiday is a holiday. No adjustment. + ], + ) + def test_other_calendar( + self, + day: str, + day_adjusted: str, + day_other: str, + roll_fn: RollFn, + return_name: bool, + ): """Test that the other given calendar is applied correctly when adjusting the holidays.""" # Unadjusted holiday. @@ -107,14 +199,36 @@ def test_other_calendar(self, day: str, day_adjusted: str, day_other: str, roll_ # Calendar containing the single holiday, and another holiday in the other given calendar. The weekmask covers # all days of the week, so it should not have any impact on adjustments. Also uses the given roll function. - calendar = AdjustedHolidayCalendar(rules=[Holiday(name=HOLIDAY, **date2args(day)), ], - other=ExchangeCalendarsHolidayCalendar( - rules=[Holiday(name=HOLIDAY, **date2args(day_other)), ]), - weekmask="1111111", roll_fn=roll_fn) + calendar = AdjustedHolidayCalendar( + rules=[ + Holiday(name=HOLIDAY, **date2args(day)), + ], + other=ExchangeCalendarsHolidayCalendar( + rules=[ + Holiday(name=HOLIDAY, **date2args(day_other)), + ] + ), + weekmask="1111111", + roll_fn=roll_fn, + ) # Test that the holidays are adjusted correctly. assert calendar.holidays(return_name=return_name).equals( - pd.Series([HOLIDAY, ], index=[day_adjusted, ]) if return_name else pd.DatetimeIndex([day_adjusted, ])) + pd.Series( + [ + HOLIDAY, + ], + index=[ + day_adjusted, + ], + ) + if return_name + else pd.DatetimeIndex( + [ + day_adjusted, + ] + ) + ) def test_multiple_adjustments_and_roll_fn(self, mocker): """Test that the roll function is called multiple times when necessary.""" @@ -129,20 +243,35 @@ def test_multiple_adjustments_and_roll_fn(self, mocker): thu = fri - pd.Timedelta(days=1) # Create a spy on the roll_backward function. - spy_roll_fn = mocker.spy(tests.util, 'roll_backward') + spy_roll_fn = mocker.spy(tests.util, "roll_backward") # Calendar with mon as a holiday. In the given other calendar, mon is a holiday as well, as is fri. The # weekmask covers all days of the week, so it should not have any impact on adjustments. Also uses the given # roll function. - calendar = AdjustedHolidayCalendar(rules=[ - Holiday(name=HOLIDAY, **date2args(mon)), # Monday. - ], other=ExchangeCalendarsHolidayCalendar(rules=[ - Holiday(name="Other Holiday 1", **date2args(fri)), # Previous Friday. - Holiday(name="Other Holiday 2", **date2args(mon)), # Monday.
- ]), weekmask="1111100", roll_fn=spy_roll_fn) + calendar = AdjustedHolidayCalendar( + rules=[ + Holiday(name=HOLIDAY, **date2args(mon)), # Monday. + ], + other=ExchangeCalendarsHolidayCalendar( + rules=[ + Holiday( + name="Other Holiday 1", **date2args(fri) + ), # Previous Friday. + Holiday(name="Other Holiday 2", **date2args(mon)), # Monday. + ] + ), + weekmask="1111100", + roll_fn=spy_roll_fn, + ) # Check if the holiday is adjusted correctly. It should be rolled back to the previous Thursday. - assert calendar.holidays().equals(pd.DatetimeIndex([thu, ])) + assert calendar.holidays().equals( + pd.DatetimeIndex( + [ + thu, + ] + ) + ) # Should have rolled four times. assert spy_roll_fn.call_count == 4 @@ -167,10 +296,18 @@ def test_roll_fn_returns_none(self, mocker): # Calendar with a holiday that conflicts with a holiday in the other calendar. The weekmask is set to Monday to # Sunday. - calendar = AdjustedHolidayCalendar(rules=[Holiday(name=HOLIDAY, **date2args(day)), ], - other=ExchangeCalendarsHolidayCalendar( - rules=[Holiday(name=HOLIDAY, **date2args(day)), ]), - weekmask="1111111", roll_fn=mock_roll_fn) + calendar = AdjustedHolidayCalendar( + rules=[ + Holiday(name=HOLIDAY, **date2args(day)), + ], + other=ExchangeCalendarsHolidayCalendar( + rules=[ + Holiday(name=HOLIDAY, **date2args(day)), + ] + ), + weekmask="1111111", + roll_fn=mock_roll_fn, + ) # Check if holiday gets dropped due to the roll function returning None. assert calendar.holidays().equals(pd.DatetimeIndex([])) @@ -190,15 +327,30 @@ def test_internal_conflict(self, roll_fn: RollFn): # Calendar with conflicting rules. The given other calendar is empty. The weekmask covers all days of the week, # so it should not have any impact on adjustments. Also uses the given roll function. - calendar = AdjustedHolidayCalendar(rules=[ - Holiday(name=HOLIDAY, **date2args(day)), # Mon - Holiday(name=HOLIDAY, **date2args(day)), # Same day as holiday above. - ], other=ExchangeCalendarsHolidayCalendar([]), weekmask="1111111", roll_fn=roll_fn) + calendar = AdjustedHolidayCalendar( + rules=[ + Holiday(name=HOLIDAY, **date2args(day)), # Mon + Holiday(name=HOLIDAY, **date2args(day)), # Same day as holiday above. + ], + other=ExchangeCalendarsHolidayCalendar([]), + weekmask="1111111", + roll_fn=roll_fn, + ) # Check if the holidays are adjusted in order of definition, i.e. Holiday gets adjusted by roll_fn, Other # Holiday remains untouched. assert calendar.holidays(return_name=True).equals( - pd.Series([HOLIDAY, HOLIDAY, ], index=[day_adjusted, day, ])) + pd.Series( + [ + HOLIDAY, + HOLIDAY, + ], + index=[ + day_adjusted, + day, + ], + ) + ) def test_roll_precedence(self): """Test case where a holiday gets rolled back due to a conflict with a holiday in another calendar, but then, @@ -217,14 +369,33 @@ def test_roll_precedence(self): # other calendar. The weekmask covers all days of the week, so it should not have any impact on adjustments. # Uses a roll function that rolls back one day.
calendar = AdjustedHolidayCalendar( - rules=[Holiday(name="Holiday 1", **date2args(day)), Holiday(name="Holiday 2", **date2args(day_after)), ], - other=ExchangeCalendarsHolidayCalendar(rules=[Holiday(name=HOLIDAY, **date2args(day_after)), ]), - weekmask="1111111", roll_fn=roll_backward) + rules=[ + Holiday(name="Holiday 1", **date2args(day)), + Holiday(name="Holiday 2", **date2args(day_after)), + ], + other=ExchangeCalendarsHolidayCalendar( + rules=[ + Holiday(name=HOLIDAY, **date2args(day_after)), + ] + ), + weekmask="1111111", + roll_fn=roll_backward, + ) # Holiday 2 should first be adjusted to `day` due to the conflict with Other Holiday. Then, Holiday 1 # should be adjusted to `day_before` due to the conflict with the adjusted Holiday 2. assert calendar.holidays(return_name=True).equals( - pd.Series(["Holiday 1", "Holiday 2", ], index=[day_before, day, ])) + pd.Series( + [ + "Holiday 1", + "Holiday 2", + ], + index=[ + day_before, + day, + ], + ) + ) @pytest.mark.parametrize("roll_fn", [roll_backward, roll_forward]) def test_roll_outside_range(self, roll_fn: RollFn): @@ -237,28 +408,48 @@ def test_roll_outside_range(self, roll_fn: RollFn): # Adjusted holiday. day_adjusted = roll_fn(day) - calendar = AdjustedHolidayCalendar(rules=[Holiday(name=HOLIDAY, **date2args(day)), ], - other=ExchangeCalendarsHolidayCalendar( - rules=[Holiday(name=HOLIDAY, **date2args(day)), ]), - weekmask="1111111", roll_fn=roll_fn) + calendar = AdjustedHolidayCalendar( + rules=[ + Holiday(name=HOLIDAY, **date2args(day)), + ], + other=ExchangeCalendarsHolidayCalendar( + rules=[ + Holiday(name=HOLIDAY, **date2args(day)), + ] + ), + weekmask="1111111", + roll_fn=roll_fn, + ) # Adjusted holiday should be different from unadjusted one. assert day_adjusted != day # Holiday should be adjusted by rolling once due to the conflict with Other Holiday. - assert calendar.holidays(return_name=True).equals(pd.Series([HOLIDAY, ], index=[day_adjusted, ])) + assert calendar.holidays(return_name=True).equals( + pd.Series( + [ + HOLIDAY, + ], + index=[ + day_adjusted, + ], + ) + ) # Holiday should not be included when the requested date range only covers the original date, although the # unadjusted date falls within. 
- assert calendar.holidays(start=day, end=day, return_name=True).equals(pd.Series([], dtype="object")) + assert calendar.holidays(start=day, end=day, return_name=True).equals( + pd.Series([], dtype="object") + ) class TestHolidayCalendars: - def test_get_holiday_calendar_from_timestamps(self): timestamps = [pd.Timestamp("2019-01-01"), pd.Timestamp("2019-01-02")] calendar = get_holiday_calendar_from_timestamps(timestamps) - holidays = calendar.holidays(start=pd.Timestamp("2019-01-01"), end=pd.Timestamp("2019-01-31")) + holidays = calendar.holidays( + start=pd.Timestamp("2019-01-01"), end=pd.Timestamp("2019-01-31") + ) assert pd.Timestamp("2019-01-01") in holidays assert pd.Timestamp("2019-01-02") in holidays assert pd.Timestamp("2019-01-03") not in holidays @@ -266,7 +457,9 @@ def test_get_holiday_calendar_from_timestamps(self): def test_get_holiday_calendar_from_day_of_week(self): calendar = get_holiday_calendar_from_day_of_week(0) - holidays = calendar.holidays(start=pd.Timestamp("2019-01-01"), end=pd.Timestamp("2019-01-31")) + holidays = calendar.holidays( + start=pd.Timestamp("2019-01-01"), end=pd.Timestamp("2019-01-31") + ) assert pd.Timestamp("2019-01-07") in holidays assert pd.Timestamp("2019-01-14") in holidays assert pd.Timestamp("2019-01-21") in holidays @@ -291,10 +484,14 @@ def test_get_holiday_calendar_from_day_of_week(self): assert pd.Timestamp("2019-01-20") not in holidays def test_merge_calendars(self): - calendar1 = get_holiday_calendar_from_timestamps([pd.Timestamp("2019-01-01"), pd.Timestamp("2019-01-02")]) + calendar1 = get_holiday_calendar_from_timestamps( + [pd.Timestamp("2019-01-01"), pd.Timestamp("2019-01-02")] + ) calendar2 = get_holiday_calendar_from_day_of_week(0) calendar = merge_calendars((calendar1, calendar2)) - holidays = calendar.holidays(start=pd.Timestamp("2019-01-01"), end=pd.Timestamp("2019-01-31")) + holidays = calendar.holidays( + start=pd.Timestamp("2019-01-01"), end=pd.Timestamp("2019-01-31") + ) assert pd.Timestamp("2019-01-01") in holidays assert pd.Timestamp("2019-01-02") in holidays assert pd.Timestamp("2019-01-07") in holidays @@ -323,10 +520,16 @@ def test_merge_calendars(self): assert pd.Timestamp("2019-01-25") not in holidays def test_merge_calendars_with_overlapping_holidays(self): - calendar1 = get_holiday_calendar_from_timestamps([pd.Timestamp("2019-01-01"), pd.Timestamp("2019-01-02")]) - calendar2 = get_holiday_calendar_from_timestamps([pd.Timestamp("2019-01-01"), pd.Timestamp("2019-01-03")]) + calendar1 = get_holiday_calendar_from_timestamps( + [pd.Timestamp("2019-01-01"), pd.Timestamp("2019-01-02")] + ) + calendar2 = get_holiday_calendar_from_timestamps( + [pd.Timestamp("2019-01-01"), pd.Timestamp("2019-01-03")] + ) calendar = merge_calendars((calendar1, calendar2)) - holidays = calendar.holidays(start=pd.Timestamp("2019-01-01"), end=pd.Timestamp("2019-01-31")) + holidays = calendar.holidays( + start=pd.Timestamp("2019-01-01"), end=pd.Timestamp("2019-01-31") + ) assert len(holidays) == 3 assert pd.Timestamp("2019-01-01") in holidays assert pd.Timestamp("2019-01-02") in holidays @@ -336,19 +539,24 @@ def test_merge_calendars_with_overlapping_holidays(self): def test_get_holidays_calendar(self): calendar = get_calendar("XLON") holidays_calendar = get_holidays_calendar(calendar) - holidays = holidays_calendar.holidays(start=pd.Timestamp("2020-01-01"), end=pd.Timestamp("2020-12-31"), - return_name=True) - expected_holidays = pd.Series({ - pd.Timestamp("2020-01-01"): "New Year's Day", - pd.Timestamp("2020-04-10"): "Good Friday", - 
pd.Timestamp("2020-04-13"): "Easter Monday", - pd.Timestamp("2020-05-08"): "ad-hoc holiday", - pd.Timestamp("2020-05-25"): "Spring Bank Holiday", - pd.Timestamp("2020-08-31"): "Summer Bank Holiday", - pd.Timestamp("2020-12-25"): "Christmas", - pd.Timestamp("2020-12-26"): "Boxing Day", - pd.Timestamp("2020-12-28"): "Weekend Boxing Day", - }) + holidays = holidays_calendar.holidays( + start=pd.Timestamp("2020-01-01"), + end=pd.Timestamp("2020-12-31"), + return_name=True, + ) + expected_holidays = pd.Series( + { + pd.Timestamp("2020-01-01"): "New Year's Day", + pd.Timestamp("2020-04-10"): "Good Friday", + pd.Timestamp("2020-04-13"): "Easter Monday", + pd.Timestamp("2020-05-08"): "ad-hoc holiday", + pd.Timestamp("2020-05-25"): "Spring Bank Holiday", + pd.Timestamp("2020-08-31"): "Summer Bank Holiday", + pd.Timestamp("2020-12-25"): "Christmas", + pd.Timestamp("2020-12-26"): "Boxing Day", + pd.Timestamp("2020-12-28"): "Weekend Boxing Day", + } + ) assert holidays.compare(expected_holidays).empty def test_get_special_closes_calendar(self): @@ -360,10 +568,12 @@ class TestCalendar(ExchangeCalendar): close_times = ((None, time(16, 30)),) @property - def regular_holidays(self): return HolidayCalendar([]) + def regular_holidays(self): + return HolidayCalendar([]) @property - def adhoc_holidays(self): return [] + def adhoc_holidays(self): + return [] @property def special_closes(self): @@ -377,7 +587,7 @@ def special_closes(self): ] ), ), - (time(11, 30), 0) # Monday + (time(11, 30), 0), # Monday ] @property @@ -385,10 +595,12 @@ def special_closes_adhoc(self): return [ ( self.regular_early_close, - pd.DatetimeIndex([ - pd.Timestamp("2020-01-08"), - pd.Timestamp("2020-08-12"), - ]) + pd.DatetimeIndex( + [ + pd.Timestamp("2020-01-08"), + pd.Timestamp("2020-08-12"), + ] + ), ) ] @@ -396,66 +608,71 @@ def special_closes_adhoc(self): calendar = TestCalendar() special_closes_calendar = get_special_closes_calendar(calendar) - special_closes = special_closes_calendar.holidays(start=pd.Timestamp("2020-01-01"), - end=pd.Timestamp("2020-12-31"), return_name=True) - expected_special_closes = pd.Series({ - pd.Timestamp("2020-01-06"): SPECIAL_CLOSE, - pd.Timestamp("2020-01-08"): "ad-hoc special close", - pd.Timestamp("2020-01-13"): SPECIAL_CLOSE, - pd.Timestamp("2020-01-20"): SPECIAL_CLOSE, - pd.Timestamp("2020-01-27"): SPECIAL_CLOSE, - pd.Timestamp("2020-02-03"): SPECIAL_CLOSE, - pd.Timestamp("2020-02-10"): SPECIAL_CLOSE, - pd.Timestamp("2020-02-17"): SPECIAL_CLOSE, - pd.Timestamp("2020-02-24"): SPECIAL_CLOSE, - pd.Timestamp("2020-03-02"): SPECIAL_CLOSE, - pd.Timestamp("2020-03-09"): SPECIAL_CLOSE, - pd.Timestamp("2020-03-16"): SPECIAL_CLOSE, - pd.Timestamp("2020-03-23"): SPECIAL_CLOSE, - pd.Timestamp("2020-03-30"): SPECIAL_CLOSE, - pd.Timestamp("2020-04-06"): SPECIAL_CLOSE, - pd.Timestamp("2020-04-13"): SPECIAL_CLOSE, - pd.Timestamp("2020-04-20"): SPECIAL_CLOSE, - pd.Timestamp("2020-04-27"): SPECIAL_CLOSE, - pd.Timestamp("2020-05-04"): SPECIAL_CLOSE, - pd.Timestamp("2020-05-11"): SPECIAL_CLOSE, - pd.Timestamp("2020-05-18"): SPECIAL_CLOSE, - pd.Timestamp("2020-05-25"): SPECIAL_CLOSE, - pd.Timestamp("2020-06-01"): SPECIAL_CLOSE, - pd.Timestamp("2020-06-08"): SPECIAL_CLOSE, - pd.Timestamp("2020-06-15"): SPECIAL_CLOSE, - pd.Timestamp("2020-06-22"): SPECIAL_CLOSE, - pd.Timestamp("2020-06-29"): SPECIAL_CLOSE, - pd.Timestamp("2020-07-06"): SPECIAL_CLOSE, - pd.Timestamp("2020-07-13"): SPECIAL_CLOSE, - pd.Timestamp("2020-07-20"): SPECIAL_CLOSE, - pd.Timestamp("2020-07-27"): SPECIAL_CLOSE, - pd.Timestamp("2020-08-03"): 
SPECIAL_CLOSE, - pd.Timestamp("2020-08-10"): SPECIAL_CLOSE, - pd.Timestamp("2020-08-12"): "ad-hoc special close", - pd.Timestamp("2020-08-17"): SPECIAL_CLOSE, - pd.Timestamp("2020-08-24"): SPECIAL_CLOSE, - pd.Timestamp("2020-08-31"): SPECIAL_CLOSE, - pd.Timestamp("2020-09-07"): SPECIAL_CLOSE, - pd.Timestamp("2020-09-14"): SPECIAL_CLOSE, - pd.Timestamp("2020-09-21"): SPECIAL_CLOSE, - pd.Timestamp("2020-09-28"): SPECIAL_CLOSE, - pd.Timestamp("2020-10-05"): SPECIAL_CLOSE, - pd.Timestamp("2020-10-12"): SPECIAL_CLOSE, - pd.Timestamp("2020-10-19"): SPECIAL_CLOSE, - pd.Timestamp("2020-10-26"): SPECIAL_CLOSE, - pd.Timestamp("2020-11-02"): SPECIAL_CLOSE, - pd.Timestamp("2020-11-09"): SPECIAL_CLOSE, - pd.Timestamp("2020-11-16"): SPECIAL_CLOSE, - pd.Timestamp("2020-11-23"): SPECIAL_CLOSE, - pd.Timestamp("2020-11-30"): SPECIAL_CLOSE, - pd.Timestamp("2020-12-07"): SPECIAL_CLOSE, - pd.Timestamp("2020-12-14"): SPECIAL_CLOSE, - pd.Timestamp("2020-12-21"): SPECIAL_CLOSE, - pd.Timestamp("2020-12-24"): "Christmas Eve", - pd.Timestamp("2020-12-28"): SPECIAL_CLOSE, - pd.Timestamp("2020-12-31"): "New Year's Eve", - }) + special_closes = special_closes_calendar.holidays( + start=pd.Timestamp("2020-01-01"), + end=pd.Timestamp("2020-12-31"), + return_name=True, + ) + expected_special_closes = pd.Series( + { + pd.Timestamp("2020-01-06"): SPECIAL_CLOSE, + pd.Timestamp("2020-01-08"): "ad-hoc special close", + pd.Timestamp("2020-01-13"): SPECIAL_CLOSE, + pd.Timestamp("2020-01-20"): SPECIAL_CLOSE, + pd.Timestamp("2020-01-27"): SPECIAL_CLOSE, + pd.Timestamp("2020-02-03"): SPECIAL_CLOSE, + pd.Timestamp("2020-02-10"): SPECIAL_CLOSE, + pd.Timestamp("2020-02-17"): SPECIAL_CLOSE, + pd.Timestamp("2020-02-24"): SPECIAL_CLOSE, + pd.Timestamp("2020-03-02"): SPECIAL_CLOSE, + pd.Timestamp("2020-03-09"): SPECIAL_CLOSE, + pd.Timestamp("2020-03-16"): SPECIAL_CLOSE, + pd.Timestamp("2020-03-23"): SPECIAL_CLOSE, + pd.Timestamp("2020-03-30"): SPECIAL_CLOSE, + pd.Timestamp("2020-04-06"): SPECIAL_CLOSE, + pd.Timestamp("2020-04-13"): SPECIAL_CLOSE, + pd.Timestamp("2020-04-20"): SPECIAL_CLOSE, + pd.Timestamp("2020-04-27"): SPECIAL_CLOSE, + pd.Timestamp("2020-05-04"): SPECIAL_CLOSE, + pd.Timestamp("2020-05-11"): SPECIAL_CLOSE, + pd.Timestamp("2020-05-18"): SPECIAL_CLOSE, + pd.Timestamp("2020-05-25"): SPECIAL_CLOSE, + pd.Timestamp("2020-06-01"): SPECIAL_CLOSE, + pd.Timestamp("2020-06-08"): SPECIAL_CLOSE, + pd.Timestamp("2020-06-15"): SPECIAL_CLOSE, + pd.Timestamp("2020-06-22"): SPECIAL_CLOSE, + pd.Timestamp("2020-06-29"): SPECIAL_CLOSE, + pd.Timestamp("2020-07-06"): SPECIAL_CLOSE, + pd.Timestamp("2020-07-13"): SPECIAL_CLOSE, + pd.Timestamp("2020-07-20"): SPECIAL_CLOSE, + pd.Timestamp("2020-07-27"): SPECIAL_CLOSE, + pd.Timestamp("2020-08-03"): SPECIAL_CLOSE, + pd.Timestamp("2020-08-10"): SPECIAL_CLOSE, + pd.Timestamp("2020-08-12"): "ad-hoc special close", + pd.Timestamp("2020-08-17"): SPECIAL_CLOSE, + pd.Timestamp("2020-08-24"): SPECIAL_CLOSE, + pd.Timestamp("2020-08-31"): SPECIAL_CLOSE, + pd.Timestamp("2020-09-07"): SPECIAL_CLOSE, + pd.Timestamp("2020-09-14"): SPECIAL_CLOSE, + pd.Timestamp("2020-09-21"): SPECIAL_CLOSE, + pd.Timestamp("2020-09-28"): SPECIAL_CLOSE, + pd.Timestamp("2020-10-05"): SPECIAL_CLOSE, + pd.Timestamp("2020-10-12"): SPECIAL_CLOSE, + pd.Timestamp("2020-10-19"): SPECIAL_CLOSE, + pd.Timestamp("2020-10-26"): SPECIAL_CLOSE, + pd.Timestamp("2020-11-02"): SPECIAL_CLOSE, + pd.Timestamp("2020-11-09"): SPECIAL_CLOSE, + pd.Timestamp("2020-11-16"): SPECIAL_CLOSE, + pd.Timestamp("2020-11-23"): SPECIAL_CLOSE, + 
pd.Timestamp("2020-11-30"): SPECIAL_CLOSE, + pd.Timestamp("2020-12-07"): SPECIAL_CLOSE, + pd.Timestamp("2020-12-14"): SPECIAL_CLOSE, + pd.Timestamp("2020-12-21"): SPECIAL_CLOSE, + pd.Timestamp("2020-12-24"): "Christmas Eve", + pd.Timestamp("2020-12-28"): SPECIAL_CLOSE, + pd.Timestamp("2020-12-31"): "New Year's Eve", + } + ) assert special_closes.compare(expected_special_closes).empty def test_get_special_opens_calendar(self): @@ -467,10 +684,12 @@ class TestCalendar(ExchangeCalendar): close_times = ((None, time(16, 30)),) @property - def regular_holidays(self): return HolidayCalendar([]) + def regular_holidays(self): + return HolidayCalendar([]) @property - def adhoc_holidays(self): return [] + def adhoc_holidays(self): + return [] @property def special_opens(self): @@ -484,7 +703,7 @@ def special_opens(self): ] ), ), - (time(11, 30), 0) # Monday + (time(11, 30), 0), # Monday ] @property @@ -492,10 +711,12 @@ def special_opens_adhoc(self): return [ ( self.regular_late_open, - pd.DatetimeIndex([ - pd.Timestamp("2020-01-08"), - pd.Timestamp("2020-08-12"), - ]) + pd.DatetimeIndex( + [ + pd.Timestamp("2020-01-08"), + pd.Timestamp("2020-08-12"), + ] + ), ) ] @@ -503,66 +724,71 @@ def special_opens_adhoc(self): calendar = TestCalendar() special_opens_calendar = get_special_opens_calendar(calendar) - special_opens = special_opens_calendar.holidays(start=pd.Timestamp("2020-01-01"), - end=pd.Timestamp("2020-12-31"), return_name=True) - expected_special_opens = pd.Series({ - pd.Timestamp("2020-01-06"): SPECIAL_OPEN, - pd.Timestamp("2020-01-08"): "ad-hoc special open", - pd.Timestamp("2020-01-13"): SPECIAL_OPEN, - pd.Timestamp("2020-01-20"): SPECIAL_OPEN, - pd.Timestamp("2020-01-27"): SPECIAL_OPEN, - pd.Timestamp("2020-02-03"): SPECIAL_OPEN, - pd.Timestamp("2020-02-10"): SPECIAL_OPEN, - pd.Timestamp("2020-02-17"): SPECIAL_OPEN, - pd.Timestamp("2020-02-24"): SPECIAL_OPEN, - pd.Timestamp("2020-03-02"): SPECIAL_OPEN, - pd.Timestamp("2020-03-09"): SPECIAL_OPEN, - pd.Timestamp("2020-03-16"): SPECIAL_OPEN, - pd.Timestamp("2020-03-23"): SPECIAL_OPEN, - pd.Timestamp("2020-03-30"): SPECIAL_OPEN, - pd.Timestamp("2020-04-06"): SPECIAL_OPEN, - pd.Timestamp("2020-04-13"): SPECIAL_OPEN, - pd.Timestamp("2020-04-20"): SPECIAL_OPEN, - pd.Timestamp("2020-04-27"): SPECIAL_OPEN, - pd.Timestamp("2020-05-04"): SPECIAL_OPEN, - pd.Timestamp("2020-05-11"): SPECIAL_OPEN, - pd.Timestamp("2020-05-18"): SPECIAL_OPEN, - pd.Timestamp("2020-05-25"): SPECIAL_OPEN, - pd.Timestamp("2020-06-01"): SPECIAL_OPEN, - pd.Timestamp("2020-06-08"): SPECIAL_OPEN, - pd.Timestamp("2020-06-15"): SPECIAL_OPEN, - pd.Timestamp("2020-06-22"): SPECIAL_OPEN, - pd.Timestamp("2020-06-29"): SPECIAL_OPEN, - pd.Timestamp("2020-07-06"): SPECIAL_OPEN, - pd.Timestamp("2020-07-13"): SPECIAL_OPEN, - pd.Timestamp("2020-07-20"): SPECIAL_OPEN, - pd.Timestamp("2020-07-27"): SPECIAL_OPEN, - pd.Timestamp("2020-08-03"): SPECIAL_OPEN, - pd.Timestamp("2020-08-10"): SPECIAL_OPEN, - pd.Timestamp("2020-08-12"): "ad-hoc special open", - pd.Timestamp("2020-08-17"): SPECIAL_OPEN, - pd.Timestamp("2020-08-24"): SPECIAL_OPEN, - pd.Timestamp("2020-08-31"): SPECIAL_OPEN, - pd.Timestamp("2020-09-07"): SPECIAL_OPEN, - pd.Timestamp("2020-09-14"): SPECIAL_OPEN, - pd.Timestamp("2020-09-21"): SPECIAL_OPEN, - pd.Timestamp("2020-09-28"): SPECIAL_OPEN, - pd.Timestamp("2020-10-05"): SPECIAL_OPEN, - pd.Timestamp("2020-10-12"): SPECIAL_OPEN, - pd.Timestamp("2020-10-19"): SPECIAL_OPEN, - pd.Timestamp("2020-10-26"): SPECIAL_OPEN, - pd.Timestamp("2020-11-02"): SPECIAL_OPEN, - 
pd.Timestamp("2020-11-09"): SPECIAL_OPEN, - pd.Timestamp("2020-11-16"): SPECIAL_OPEN, - pd.Timestamp("2020-11-23"): SPECIAL_OPEN, - pd.Timestamp("2020-11-30"): SPECIAL_OPEN, - pd.Timestamp("2020-12-07"): SPECIAL_OPEN, - pd.Timestamp("2020-12-14"): SPECIAL_OPEN, - pd.Timestamp("2020-12-21"): SPECIAL_OPEN, - pd.Timestamp("2020-12-24"): "Christmas Eve", - pd.Timestamp("2020-12-28"): SPECIAL_OPEN, - pd.Timestamp("2020-12-31"): "New Year's Eve", - }) + special_opens = special_opens_calendar.holidays( + start=pd.Timestamp("2020-01-01"), + end=pd.Timestamp("2020-12-31"), + return_name=True, + ) + expected_special_opens = pd.Series( + { + pd.Timestamp("2020-01-06"): SPECIAL_OPEN, + pd.Timestamp("2020-01-08"): "ad-hoc special open", + pd.Timestamp("2020-01-13"): SPECIAL_OPEN, + pd.Timestamp("2020-01-20"): SPECIAL_OPEN, + pd.Timestamp("2020-01-27"): SPECIAL_OPEN, + pd.Timestamp("2020-02-03"): SPECIAL_OPEN, + pd.Timestamp("2020-02-10"): SPECIAL_OPEN, + pd.Timestamp("2020-02-17"): SPECIAL_OPEN, + pd.Timestamp("2020-02-24"): SPECIAL_OPEN, + pd.Timestamp("2020-03-02"): SPECIAL_OPEN, + pd.Timestamp("2020-03-09"): SPECIAL_OPEN, + pd.Timestamp("2020-03-16"): SPECIAL_OPEN, + pd.Timestamp("2020-03-23"): SPECIAL_OPEN, + pd.Timestamp("2020-03-30"): SPECIAL_OPEN, + pd.Timestamp("2020-04-06"): SPECIAL_OPEN, + pd.Timestamp("2020-04-13"): SPECIAL_OPEN, + pd.Timestamp("2020-04-20"): SPECIAL_OPEN, + pd.Timestamp("2020-04-27"): SPECIAL_OPEN, + pd.Timestamp("2020-05-04"): SPECIAL_OPEN, + pd.Timestamp("2020-05-11"): SPECIAL_OPEN, + pd.Timestamp("2020-05-18"): SPECIAL_OPEN, + pd.Timestamp("2020-05-25"): SPECIAL_OPEN, + pd.Timestamp("2020-06-01"): SPECIAL_OPEN, + pd.Timestamp("2020-06-08"): SPECIAL_OPEN, + pd.Timestamp("2020-06-15"): SPECIAL_OPEN, + pd.Timestamp("2020-06-22"): SPECIAL_OPEN, + pd.Timestamp("2020-06-29"): SPECIAL_OPEN, + pd.Timestamp("2020-07-06"): SPECIAL_OPEN, + pd.Timestamp("2020-07-13"): SPECIAL_OPEN, + pd.Timestamp("2020-07-20"): SPECIAL_OPEN, + pd.Timestamp("2020-07-27"): SPECIAL_OPEN, + pd.Timestamp("2020-08-03"): SPECIAL_OPEN, + pd.Timestamp("2020-08-10"): SPECIAL_OPEN, + pd.Timestamp("2020-08-12"): "ad-hoc special open", + pd.Timestamp("2020-08-17"): SPECIAL_OPEN, + pd.Timestamp("2020-08-24"): SPECIAL_OPEN, + pd.Timestamp("2020-08-31"): SPECIAL_OPEN, + pd.Timestamp("2020-09-07"): SPECIAL_OPEN, + pd.Timestamp("2020-09-14"): SPECIAL_OPEN, + pd.Timestamp("2020-09-21"): SPECIAL_OPEN, + pd.Timestamp("2020-09-28"): SPECIAL_OPEN, + pd.Timestamp("2020-10-05"): SPECIAL_OPEN, + pd.Timestamp("2020-10-12"): SPECIAL_OPEN, + pd.Timestamp("2020-10-19"): SPECIAL_OPEN, + pd.Timestamp("2020-10-26"): SPECIAL_OPEN, + pd.Timestamp("2020-11-02"): SPECIAL_OPEN, + pd.Timestamp("2020-11-09"): SPECIAL_OPEN, + pd.Timestamp("2020-11-16"): SPECIAL_OPEN, + pd.Timestamp("2020-11-23"): SPECIAL_OPEN, + pd.Timestamp("2020-11-30"): SPECIAL_OPEN, + pd.Timestamp("2020-12-07"): SPECIAL_OPEN, + pd.Timestamp("2020-12-14"): SPECIAL_OPEN, + pd.Timestamp("2020-12-21"): SPECIAL_OPEN, + pd.Timestamp("2020-12-24"): "Christmas Eve", + pd.Timestamp("2020-12-28"): SPECIAL_OPEN, + pd.Timestamp("2020-12-31"): "New Year's Eve", + } + ) assert special_opens.compare(expected_special_opens).empty def test_get_weekend_days_calendar(self): @@ -571,23 +797,28 @@ class TestCalendar(ExchangeCalendar): tz = timezone("Europe/London") open_times = ((None, time(8)),) close_times = ((None, time(16, 30)),) - weekmask = '1111010' + weekmask = "1111010" calendar = TestCalendar() weekend_days_calendar = get_weekend_days_calendar(calendar) - weekend_days = 
weekend_days_calendar.holidays(start=pd.Timestamp("2020-01-01"), end=pd.Timestamp("2020-01-31"), - return_name=True) - expected_weekend_days = pd.Series({ - pd.Timestamp("2020-01-03"): WEEKEND_DAY, - pd.Timestamp("2020-01-05"): WEEKEND_DAY, - pd.Timestamp("2020-01-10"): WEEKEND_DAY, - pd.Timestamp("2020-01-12"): WEEKEND_DAY, - pd.Timestamp("2020-01-17"): WEEKEND_DAY, - pd.Timestamp("2020-01-19"): WEEKEND_DAY, - pd.Timestamp("2020-01-24"): WEEKEND_DAY, - pd.Timestamp("2020-01-26"): WEEKEND_DAY, - pd.Timestamp("2020-01-31"): WEEKEND_DAY, - }) + weekend_days = weekend_days_calendar.holidays( + start=pd.Timestamp("2020-01-01"), + end=pd.Timestamp("2020-01-31"), + return_name=True, + ) + expected_weekend_days = pd.Series( + { + pd.Timestamp("2020-01-03"): WEEKEND_DAY, + pd.Timestamp("2020-01-05"): WEEKEND_DAY, + pd.Timestamp("2020-01-10"): WEEKEND_DAY, + pd.Timestamp("2020-01-12"): WEEKEND_DAY, + pd.Timestamp("2020-01-17"): WEEKEND_DAY, + pd.Timestamp("2020-01-19"): WEEKEND_DAY, + pd.Timestamp("2020-01-24"): WEEKEND_DAY, + pd.Timestamp("2020-01-26"): WEEKEND_DAY, + pd.Timestamp("2020-01-31"): WEEKEND_DAY, + } + ) assert weekend_days.compare(expected_weekend_days).empty @@ -595,141 +826,199 @@ def test_get_monthly_expiry_calendar(self): # Test plain vanilla calendar without any special days or close days that may fall onto the same days as monthly # expiry. - monthly_expiry_calendar = HolidayCalendar(rules=get_monthly_expiry_rules(day_of_week=4)) - monthly_expiry = monthly_expiry_calendar.holidays(start=pd.Timestamp("2020-01-01"), - end=pd.Timestamp("2020-12-31"), return_name=True) - expected_monthly_expiry = pd.Series({ - pd.Timestamp("2020-01-17"): MONTHLY_EXPIRY, - pd.Timestamp("2020-02-21"): MONTHLY_EXPIRY, - pd.Timestamp("2020-04-17"): MONTHLY_EXPIRY, - pd.Timestamp("2020-05-15"): MONTHLY_EXPIRY, - pd.Timestamp("2020-07-17"): MONTHLY_EXPIRY, - pd.Timestamp("2020-08-21"): MONTHLY_EXPIRY, - pd.Timestamp("2020-10-16"): MONTHLY_EXPIRY, - pd.Timestamp("2020-11-20"): MONTHLY_EXPIRY, - }) + monthly_expiry_calendar = HolidayCalendar( + rules=get_monthly_expiry_rules(day_of_week=4) + ) + monthly_expiry = monthly_expiry_calendar.holidays( + start=pd.Timestamp("2020-01-01"), + end=pd.Timestamp("2020-12-31"), + return_name=True, + ) + expected_monthly_expiry = pd.Series( + { + pd.Timestamp("2020-01-17"): MONTHLY_EXPIRY, + pd.Timestamp("2020-02-21"): MONTHLY_EXPIRY, + pd.Timestamp("2020-04-17"): MONTHLY_EXPIRY, + pd.Timestamp("2020-05-15"): MONTHLY_EXPIRY, + pd.Timestamp("2020-07-17"): MONTHLY_EXPIRY, + pd.Timestamp("2020-08-21"): MONTHLY_EXPIRY, + pd.Timestamp("2020-10-16"): MONTHLY_EXPIRY, + pd.Timestamp("2020-11-20"): MONTHLY_EXPIRY, + } + ) assert monthly_expiry.compare(expected_monthly_expiry).empty # Test calendar with identity observance. 
- monthly_expiry_calendar = HolidayCalendar(rules=get_monthly_expiry_rules(day_of_week=4, observance=lambda x: x)) - monthly_expiry = monthly_expiry_calendar.holidays(start=pd.Timestamp("2020-01-01"), - end=pd.Timestamp("2020-12-31"), return_name=True) - expected_monthly_expiry = pd.Series({ - pd.Timestamp("2020-01-17"): MONTHLY_EXPIRY, - pd.Timestamp("2020-02-21"): MONTHLY_EXPIRY, - pd.Timestamp("2020-04-17"): MONTHLY_EXPIRY, - pd.Timestamp("2020-05-15"): MONTHLY_EXPIRY, - pd.Timestamp("2020-07-17"): MONTHLY_EXPIRY, - pd.Timestamp("2020-08-21"): MONTHLY_EXPIRY, - pd.Timestamp("2020-10-16"): MONTHLY_EXPIRY, - pd.Timestamp("2020-11-20"): MONTHLY_EXPIRY, - }) + monthly_expiry_calendar = HolidayCalendar( + rules=get_monthly_expiry_rules(day_of_week=4, observance=lambda x: x) + ) + monthly_expiry = monthly_expiry_calendar.holidays( + start=pd.Timestamp("2020-01-01"), + end=pd.Timestamp("2020-12-31"), + return_name=True, + ) + expected_monthly_expiry = pd.Series( + { + pd.Timestamp("2020-01-17"): MONTHLY_EXPIRY, + pd.Timestamp("2020-02-21"): MONTHLY_EXPIRY, + pd.Timestamp("2020-04-17"): MONTHLY_EXPIRY, + pd.Timestamp("2020-05-15"): MONTHLY_EXPIRY, + pd.Timestamp("2020-07-17"): MONTHLY_EXPIRY, + pd.Timestamp("2020-08-21"): MONTHLY_EXPIRY, + pd.Timestamp("2020-10-16"): MONTHLY_EXPIRY, + pd.Timestamp("2020-11-20"): MONTHLY_EXPIRY, + } + ) assert monthly_expiry.compare(expected_monthly_expiry).empty # Test calendar with an observance that moves the holiday to the previous day. monthly_expiry_calendar = HolidayCalendar( - rules=get_monthly_expiry_rules(day_of_week=4, observance=lambda x: x - pd.Timedelta(days=1))) - monthly_expiry = monthly_expiry_calendar.holidays(start=pd.Timestamp("2020-01-01"), - end=pd.Timestamp("2020-12-31"), return_name=True) - expected_monthly_expiry = pd.Series({ - pd.Timestamp("2020-01-16"): MONTHLY_EXPIRY, - pd.Timestamp("2020-02-20"): MONTHLY_EXPIRY, - pd.Timestamp("2020-04-16"): MONTHLY_EXPIRY, - pd.Timestamp("2020-05-14"): MONTHLY_EXPIRY, - pd.Timestamp("2020-07-16"): MONTHLY_EXPIRY, - pd.Timestamp("2020-08-20"): MONTHLY_EXPIRY, - pd.Timestamp("2020-10-15"): MONTHLY_EXPIRY, - pd.Timestamp("2020-11-19"): MONTHLY_EXPIRY, - }) + rules=get_monthly_expiry_rules( + day_of_week=4, observance=lambda x: x - pd.Timedelta(days=1) + ) + ) + monthly_expiry = monthly_expiry_calendar.holidays( + start=pd.Timestamp("2020-01-01"), + end=pd.Timestamp("2020-12-31"), + return_name=True, + ) + expected_monthly_expiry = pd.Series( + { + pd.Timestamp("2020-01-16"): MONTHLY_EXPIRY, + pd.Timestamp("2020-02-20"): MONTHLY_EXPIRY, + pd.Timestamp("2020-04-16"): MONTHLY_EXPIRY, + pd.Timestamp("2020-05-14"): MONTHLY_EXPIRY, + pd.Timestamp("2020-07-16"): MONTHLY_EXPIRY, + pd.Timestamp("2020-08-20"): MONTHLY_EXPIRY, + pd.Timestamp("2020-10-15"): MONTHLY_EXPIRY, + pd.Timestamp("2020-11-19"): MONTHLY_EXPIRY, + } + ) assert monthly_expiry.compare(expected_monthly_expiry).empty def test_get_quadruple_witching_calendar(self): # Test plain vanilla calendar without any special days or close days that may fall onto the same days as quarterly # expiry. 
- quarterly_expiry_calendar = HolidayCalendar(rules=get_quadruple_witching_rules(day_of_week=4)) - quarterly_expiry = quarterly_expiry_calendar.holidays(start=pd.Timestamp("2020-01-01"), - end=pd.Timestamp("2020-12-31"), return_name=True) - expected_quarterly_expiry = pd.Series({ - pd.Timestamp("2020-03-20"): QUARTERLY_EXPIRY, - pd.Timestamp("2020-06-19"): QUARTERLY_EXPIRY, - pd.Timestamp("2020-09-18"): QUARTERLY_EXPIRY, - pd.Timestamp("2020-12-18"): QUARTERLY_EXPIRY, - }) + quarterly_expiry_calendar = HolidayCalendar( + rules=get_quadruple_witching_rules(day_of_week=4) + ) + quarterly_expiry = quarterly_expiry_calendar.holidays( + start=pd.Timestamp("2020-01-01"), + end=pd.Timestamp("2020-12-31"), + return_name=True, + ) + expected_quarterly_expiry = pd.Series( + { + pd.Timestamp("2020-03-20"): QUARTERLY_EXPIRY, + pd.Timestamp("2020-06-19"): QUARTERLY_EXPIRY, + pd.Timestamp("2020-09-18"): QUARTERLY_EXPIRY, + pd.Timestamp("2020-12-18"): QUARTERLY_EXPIRY, + } + ) assert quarterly_expiry.compare(expected_quarterly_expiry).empty # Test calendar with identity observance. quarterly_expiry_calendar = HolidayCalendar( - rules=get_quadruple_witching_rules(day_of_week=4, observance=lambda x: x)) - quarterly_expiry = quarterly_expiry_calendar.holidays(start=pd.Timestamp("2020-01-01"), - end=pd.Timestamp("2020-12-31"), return_name=True) - expected_quarterly_expiry = pd.Series({ - pd.Timestamp("2020-03-20"): QUARTERLY_EXPIRY, - pd.Timestamp("2020-06-19"): QUARTERLY_EXPIRY, - pd.Timestamp("2020-09-18"): QUARTERLY_EXPIRY, - pd.Timestamp("2020-12-18"): QUARTERLY_EXPIRY, - }) + rules=get_quadruple_witching_rules(day_of_week=4, observance=lambda x: x) + ) + quarterly_expiry = quarterly_expiry_calendar.holidays( + start=pd.Timestamp("2020-01-01"), + end=pd.Timestamp("2020-12-31"), + return_name=True, + ) + expected_quarterly_expiry = pd.Series( + { + pd.Timestamp("2020-03-20"): QUARTERLY_EXPIRY, + pd.Timestamp("2020-06-19"): QUARTERLY_EXPIRY, + pd.Timestamp("2020-09-18"): QUARTERLY_EXPIRY, + pd.Timestamp("2020-12-18"): QUARTERLY_EXPIRY, + } + ) assert quarterly_expiry.compare(expected_quarterly_expiry).empty # Test calendar with an observance that moves the holiday to the previous day. quarterly_expiry_calendar = HolidayCalendar( - rules=get_quadruple_witching_rules(day_of_week=4, observance=lambda x: x - pd.Timedelta(days=1))) - quarterly_expiry = quarterly_expiry_calendar.holidays(start=pd.Timestamp("2020-01-01"), - end=pd.Timestamp("2020-12-31"), return_name=True) - expected_quarterly_expiry = pd.Series({ - pd.Timestamp("2020-03-19"): QUARTERLY_EXPIRY, - pd.Timestamp("2020-06-18"): QUARTERLY_EXPIRY, - pd.Timestamp("2020-09-17"): QUARTERLY_EXPIRY, - pd.Timestamp("2020-12-17"): QUARTERLY_EXPIRY, - }) + rules=get_quadruple_witching_rules( + day_of_week=4, observance=lambda x: x - pd.Timedelta(days=1) + ) + ) + quarterly_expiry = quarterly_expiry_calendar.holidays( + start=pd.Timestamp("2020-01-01"), + end=pd.Timestamp("2020-12-31"), + return_name=True, + ) + expected_quarterly_expiry = pd.Series( + { + pd.Timestamp("2020-03-19"): QUARTERLY_EXPIRY, + pd.Timestamp("2020-06-18"): QUARTERLY_EXPIRY, + pd.Timestamp("2020-09-17"): QUARTERLY_EXPIRY, + pd.Timestamp("2020-12-17"): QUARTERLY_EXPIRY, + } + ) assert quarterly_expiry.compare(expected_quarterly_expiry).empty def test_get_last_day_of_month_calendar(self): # Test plain vanilla calendar that ignores any special days or close days, even weekends, that may fall onto the # same days. 
- last_day_of_month_calendar = HolidayCalendar(rules=get_last_day_of_month_rules(name=LAST_DAY_OF_MONTH)) - last_day_of_month = last_day_of_month_calendar.holidays(start=pd.Timestamp("2020-01-01"), - end=pd.Timestamp("2020-12-31"), return_name=True) - expected_last_day_of_month = pd.Series({ - pd.Timestamp("2020-01-31"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-02-29"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-03-31"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-04-30"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-05-31"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-06-30"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-07-31"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-08-31"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-09-30"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-10-31"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-11-30"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-12-31"): LAST_DAY_OF_MONTH, - }) + last_day_of_month_calendar = HolidayCalendar( + rules=get_last_day_of_month_rules(name=LAST_DAY_OF_MONTH) + ) + last_day_of_month = last_day_of_month_calendar.holidays( + start=pd.Timestamp("2020-01-01"), + end=pd.Timestamp("2020-12-31"), + return_name=True, + ) + expected_last_day_of_month = pd.Series( + { + pd.Timestamp("2020-01-31"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-02-29"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-03-31"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-04-30"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-05-31"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-06-30"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-07-31"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-08-31"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-09-30"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-10-31"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-11-30"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-12-31"): LAST_DAY_OF_MONTH, + } + ) assert last_day_of_month.compare(expected_last_day_of_month).empty # Test calendar with an observance that moves the holiday to the previous day. 
last_day_of_month_calendar = HolidayCalendar( - rules=get_last_day_of_month_rules(name=LAST_DAY_OF_MONTH, observance=lambda x: x - pd.Timedelta(days=1))) - last_day_of_month = last_day_of_month_calendar.holidays(start=pd.Timestamp("2020-01-01"), - end=pd.Timestamp("2020-12-31"), return_name=True) - expected_last_day_of_month = pd.Series({ - pd.Timestamp("2020-01-30"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-02-28"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-03-30"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-04-29"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-05-30"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-06-29"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-07-30"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-08-30"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-09-29"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-10-30"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-11-29"): LAST_DAY_OF_MONTH, - pd.Timestamp("2020-12-30"): LAST_DAY_OF_MONTH, - }) + rules=get_last_day_of_month_rules( + name=LAST_DAY_OF_MONTH, observance=lambda x: x - pd.Timedelta(days=1) + ) + ) + last_day_of_month = last_day_of_month_calendar.holidays( + start=pd.Timestamp("2020-01-01"), + end=pd.Timestamp("2020-12-31"), + return_name=True, + ) + expected_last_day_of_month = pd.Series( + { + pd.Timestamp("2020-01-30"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-02-28"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-03-30"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-04-29"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-05-30"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-06-29"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-07-30"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-08-30"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-09-29"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-10-30"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-11-29"): LAST_DAY_OF_MONTH, + pd.Timestamp("2020-12-30"): LAST_DAY_OF_MONTH, + } + ) assert last_day_of_month.compare(expected_last_day_of_month).empty diff --git a/tests/test_offset.py b/tests/test_offset.py index f49512c..bd82799 100644 --- a/tests/test_offset.py +++ b/tests/test_offset.py @@ -1,47 +1,156 @@ import pandas as pd -from exchange_calendars_extensions.core.offset import get_third_day_of_week_in_month_offset_class, \ - get_last_day_of_month_offset_class +from exchange_calendars_extensions.core.offset import ( + get_third_day_of_week_in_month_offset_class, + get_last_day_of_month_offset_class, +) class TestOffsets: - def test_get_third_day_of_week_in_month_offset_class(self): - assert get_third_day_of_week_in_month_offset_class(3, 1)().holiday(2020) == pd.Timestamp('2020-01-16 00:00:00').date() - assert get_third_day_of_week_in_month_offset_class(4, 1)().holiday(2020) == pd.Timestamp('2020-01-17 00:00:00').date() - assert get_third_day_of_week_in_month_offset_class(3, 2)().holiday(2020) == pd.Timestamp('2020-02-20 00:00:00').date() - assert get_third_day_of_week_in_month_offset_class(4, 2)().holiday(2020) == pd.Timestamp('2020-02-21 00:00:00').date() - assert get_third_day_of_week_in_month_offset_class(3, 3)().holiday(2020) == pd.Timestamp('2020-03-19 00:00:00').date() - assert get_third_day_of_week_in_month_offset_class(4, 3)().holiday(2020) == pd.Timestamp('2020-03-20 00:00:00').date() - assert get_third_day_of_week_in_month_offset_class(3, 4)().holiday(2020) == pd.Timestamp('2020-04-16 00:00:00').date() - assert get_third_day_of_week_in_month_offset_class(4, 4)().holiday(2020) == pd.Timestamp('2020-04-17 00:00:00').date() - assert get_third_day_of_week_in_month_offset_class(3, 5)().holiday(2020) == pd.Timestamp('2020-05-21 00:00:00').date() - 
assert get_third_day_of_week_in_month_offset_class(4, 5)().holiday(2020) == pd.Timestamp('2020-05-15 00:00:00').date() - assert get_third_day_of_week_in_month_offset_class(3, 6)().holiday(2020) == pd.Timestamp("2020-06-18 00:00:00").date() - assert get_third_day_of_week_in_month_offset_class(4, 6)().holiday(2020) == pd.Timestamp("2020-06-19 00:00:00").date() - assert get_third_day_of_week_in_month_offset_class(3, 7)().holiday(2020) == pd.Timestamp("2020-07-16 00:00:00").date() - assert get_third_day_of_week_in_month_offset_class(4, 7)().holiday(2020) == pd.Timestamp("2020-07-17 00:00:00").date() - assert get_third_day_of_week_in_month_offset_class(3, 8)().holiday(2020) == pd.Timestamp("2020-08-20 00:00:00").date() - assert get_third_day_of_week_in_month_offset_class(4, 8)().holiday(2020) == pd.Timestamp("2020-08-21 00:00:00").date() - assert get_third_day_of_week_in_month_offset_class(3, 9)().holiday(2020) == pd.Timestamp("2020-09-17 00:00:00").date() - assert get_third_day_of_week_in_month_offset_class(4, 9)().holiday(2020) == pd.Timestamp("2020-09-18 00:00:00").date() - assert get_third_day_of_week_in_month_offset_class(3, 10)().holiday(2020) == pd.Timestamp("2020-10-15 00:00:00").date() - assert get_third_day_of_week_in_month_offset_class(4, 10)().holiday(2020) == pd.Timestamp("2020-10-16 00:00:00").date() - assert get_third_day_of_week_in_month_offset_class(3, 11)().holiday(2020) == pd.Timestamp("2020-11-19 00:00:00").date() - assert get_third_day_of_week_in_month_offset_class(4, 11)().holiday(2020) == pd.Timestamp("2020-11-20 00:00:00").date() - assert get_third_day_of_week_in_month_offset_class(3, 12)().holiday(2020) == pd.Timestamp("2020-12-17 00:00:00").date() - assert get_third_day_of_week_in_month_offset_class(4, 12)().holiday(2020) == pd.Timestamp("2020-12-18 00:00:00").date() + assert ( + get_third_day_of_week_in_month_offset_class(3, 1)().holiday(2020) + == pd.Timestamp("2020-01-16 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(4, 1)().holiday(2020) + == pd.Timestamp("2020-01-17 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(3, 2)().holiday(2020) + == pd.Timestamp("2020-02-20 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(4, 2)().holiday(2020) + == pd.Timestamp("2020-02-21 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(3, 3)().holiday(2020) + == pd.Timestamp("2020-03-19 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(4, 3)().holiday(2020) + == pd.Timestamp("2020-03-20 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(3, 4)().holiday(2020) + == pd.Timestamp("2020-04-16 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(4, 4)().holiday(2020) + == pd.Timestamp("2020-04-17 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(3, 5)().holiday(2020) + == pd.Timestamp("2020-05-21 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(4, 5)().holiday(2020) + == pd.Timestamp("2020-05-15 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(3, 6)().holiday(2020) + == pd.Timestamp("2020-06-18 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(4, 6)().holiday(2020) + == pd.Timestamp("2020-06-19 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(3, 7)().holiday(2020) + == pd.Timestamp("2020-07-16 00:00:00").date() + ) + assert ( + 
get_third_day_of_week_in_month_offset_class(4, 7)().holiday(2020) + == pd.Timestamp("2020-07-17 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(3, 8)().holiday(2020) + == pd.Timestamp("2020-08-20 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(4, 8)().holiday(2020) + == pd.Timestamp("2020-08-21 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(3, 9)().holiday(2020) + == pd.Timestamp("2020-09-17 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(4, 9)().holiday(2020) + == pd.Timestamp("2020-09-18 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(3, 10)().holiday(2020) + == pd.Timestamp("2020-10-15 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(4, 10)().holiday(2020) + == pd.Timestamp("2020-10-16 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(3, 11)().holiday(2020) + == pd.Timestamp("2020-11-19 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(4, 11)().holiday(2020) + == pd.Timestamp("2020-11-20 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(3, 12)().holiday(2020) + == pd.Timestamp("2020-12-17 00:00:00").date() + ) + assert ( + get_third_day_of_week_in_month_offset_class(4, 12)().holiday(2020) + == pd.Timestamp("2020-12-18 00:00:00").date() + ) def test_get_last_day_of_month_offset_class(self): - assert get_last_day_of_month_offset_class(1)().holiday(2020) == pd.Timestamp("2020-01-31 00:00:00").date() - assert get_last_day_of_month_offset_class(2)().holiday(2020) == pd.Timestamp("2020-02-29 00:00:00").date() - assert get_last_day_of_month_offset_class(3)().holiday(2020) == pd.Timestamp("2020-03-31 00:00:00").date() - assert get_last_day_of_month_offset_class(4)().holiday(2020) == pd.Timestamp("2020-04-30 00:00:00").date() - assert get_last_day_of_month_offset_class(5)().holiday(2020) == pd.Timestamp("2020-05-31 00:00:00").date() - assert get_last_day_of_month_offset_class(6)().holiday(2020) == pd.Timestamp("2020-06-30 00:00:00").date() - assert get_last_day_of_month_offset_class(7)().holiday(2020) == pd.Timestamp("2020-07-31 00:00:00").date() - assert get_last_day_of_month_offset_class(8)().holiday(2020) == pd.Timestamp("2020-08-31 00:00:00").date() - assert get_last_day_of_month_offset_class(9)().holiday(2020) == pd.Timestamp("2020-09-30 00:00:00").date() - assert get_last_day_of_month_offset_class(10)().holiday(2020) == pd.Timestamp("2020-10-31 00:00:00").date() - assert get_last_day_of_month_offset_class(11)().holiday(2020) == pd.Timestamp("2020-11-30 00:00:00").date() - assert get_last_day_of_month_offset_class(12)().holiday(2020) == pd.Timestamp("2020-12-31 00:00:00").date() + assert ( + get_last_day_of_month_offset_class(1)().holiday(2020) + == pd.Timestamp("2020-01-31 00:00:00").date() + ) + assert ( + get_last_day_of_month_offset_class(2)().holiday(2020) + == pd.Timestamp("2020-02-29 00:00:00").date() + ) + assert ( + get_last_day_of_month_offset_class(3)().holiday(2020) + == pd.Timestamp("2020-03-31 00:00:00").date() + ) + assert ( + get_last_day_of_month_offset_class(4)().holiday(2020) + == pd.Timestamp("2020-04-30 00:00:00").date() + ) + assert ( + get_last_day_of_month_offset_class(5)().holiday(2020) + == pd.Timestamp("2020-05-31 00:00:00").date() + ) + assert ( + get_last_day_of_month_offset_class(6)().holiday(2020) + == pd.Timestamp("2020-06-30 00:00:00").date() + ) + assert ( + 
get_last_day_of_month_offset_class(7)().holiday(2020) + == pd.Timestamp("2020-07-31 00:00:00").date() + ) + assert ( + get_last_day_of_month_offset_class(8)().holiday(2020) + == pd.Timestamp("2020-08-31 00:00:00").date() + ) + assert ( + get_last_day_of_month_offset_class(9)().holiday(2020) + == pd.Timestamp("2020-09-30 00:00:00").date() + ) + assert ( + get_last_day_of_month_offset_class(10)().holiday(2020) + == pd.Timestamp("2020-10-31 00:00:00").date() + ) + assert ( + get_last_day_of_month_offset_class(11)().holiday(2020) + == pd.Timestamp("2020-11-30 00:00:00").date() + ) + assert ( + get_last_day_of_month_offset_class(12)().holiday(2020) + == pd.Timestamp("2020-12-31 00:00:00").date() + ) diff --git a/tests/test_util.py b/tests/test_util.py index 6ed79da..36e357f 100644 --- a/tests/test_util.py +++ b/tests/test_util.py @@ -1,12 +1,15 @@ import pandas as pd import pytest -from exchange_calendars_extensions.core.util import get_day_of_week_name, get_month_name, third_day_of_week_in_month, \ - last_day_in_month +from exchange_calendars_extensions.core.util import ( + get_day_of_week_name, + get_month_name, + third_day_of_week_in_month, + last_day_in_month, +) class TestUtils: - def test_get_month_name(self): assert get_month_name(1) == "January" assert get_month_name(2) == "February" @@ -42,102 +45,270 @@ def test_get_day_of_week_name(self): def test_third_day_of_week_in_month(self): # Mondays. - assert third_day_of_week_in_month(0, 1, 2023) == pd.Timestamp("2023-01-16").date() - assert third_day_of_week_in_month(0, 2, 2023) == pd.Timestamp("2023-02-20").date() - assert third_day_of_week_in_month(0, 3, 2023) == pd.Timestamp("2023-03-20").date() - assert third_day_of_week_in_month(0, 4, 2023) == pd.Timestamp("2023-04-17").date() - assert third_day_of_week_in_month(0, 5, 2023) == pd.Timestamp("2023-05-15").date() - assert third_day_of_week_in_month(0, 6, 2023) == pd.Timestamp("2023-06-19").date() - assert third_day_of_week_in_month(0, 7, 2023) == pd.Timestamp("2023-07-17").date() - assert third_day_of_week_in_month(0, 8, 2023) == pd.Timestamp("2023-08-21").date() - assert third_day_of_week_in_month(0, 9, 2023) == pd.Timestamp("2023-09-18").date() - assert third_day_of_week_in_month(0, 10, 2023) == pd.Timestamp("2023-10-16").date() - assert third_day_of_week_in_month(0, 11, 2023) == pd.Timestamp("2023-11-20").date() - assert third_day_of_week_in_month(0, 12, 2023) == pd.Timestamp("2023-12-18").date() + assert ( + third_day_of_week_in_month(0, 1, 2023) == pd.Timestamp("2023-01-16").date() + ) + assert ( + third_day_of_week_in_month(0, 2, 2023) == pd.Timestamp("2023-02-20").date() + ) + assert ( + third_day_of_week_in_month(0, 3, 2023) == pd.Timestamp("2023-03-20").date() + ) + assert ( + third_day_of_week_in_month(0, 4, 2023) == pd.Timestamp("2023-04-17").date() + ) + assert ( + third_day_of_week_in_month(0, 5, 2023) == pd.Timestamp("2023-05-15").date() + ) + assert ( + third_day_of_week_in_month(0, 6, 2023) == pd.Timestamp("2023-06-19").date() + ) + assert ( + third_day_of_week_in_month(0, 7, 2023) == pd.Timestamp("2023-07-17").date() + ) + assert ( + third_day_of_week_in_month(0, 8, 2023) == pd.Timestamp("2023-08-21").date() + ) + assert ( + third_day_of_week_in_month(0, 9, 2023) == pd.Timestamp("2023-09-18").date() + ) + assert ( + third_day_of_week_in_month(0, 10, 2023) == pd.Timestamp("2023-10-16").date() + ) + assert ( + third_day_of_week_in_month(0, 11, 2023) == pd.Timestamp("2023-11-20").date() + ) + assert ( + third_day_of_week_in_month(0, 12, 2023) == 
pd.Timestamp("2023-12-18").date() + ) # Tuesdays. - assert third_day_of_week_in_month(1, 1, 2023) == pd.Timestamp("2023-01-17").date() - assert third_day_of_week_in_month(1, 2, 2023) == pd.Timestamp("2023-02-21").date() - assert third_day_of_week_in_month(1, 3, 2023) == pd.Timestamp("2023-03-21").date() - assert third_day_of_week_in_month(1, 4, 2023) == pd.Timestamp("2023-04-18").date() - assert third_day_of_week_in_month(1, 5, 2023) == pd.Timestamp("2023-05-16").date() - assert third_day_of_week_in_month(1, 6, 2023) == pd.Timestamp("2023-06-20").date() - assert third_day_of_week_in_month(1, 7, 2023) == pd.Timestamp("2023-07-18").date() - assert third_day_of_week_in_month(1, 8, 2023) == pd.Timestamp("2023-08-15").date() - assert third_day_of_week_in_month(1, 9, 2023) == pd.Timestamp("2023-09-19").date() - assert third_day_of_week_in_month(1, 10, 2023) == pd.Timestamp("2023-10-17").date() - assert third_day_of_week_in_month(1, 11, 2023) == pd.Timestamp("2023-11-21").date() - assert third_day_of_week_in_month(1, 12, 2023) == pd.Timestamp("2023-12-19").date() + assert ( + third_day_of_week_in_month(1, 1, 2023) == pd.Timestamp("2023-01-17").date() + ) + assert ( + third_day_of_week_in_month(1, 2, 2023) == pd.Timestamp("2023-02-21").date() + ) + assert ( + third_day_of_week_in_month(1, 3, 2023) == pd.Timestamp("2023-03-21").date() + ) + assert ( + third_day_of_week_in_month(1, 4, 2023) == pd.Timestamp("2023-04-18").date() + ) + assert ( + third_day_of_week_in_month(1, 5, 2023) == pd.Timestamp("2023-05-16").date() + ) + assert ( + third_day_of_week_in_month(1, 6, 2023) == pd.Timestamp("2023-06-20").date() + ) + assert ( + third_day_of_week_in_month(1, 7, 2023) == pd.Timestamp("2023-07-18").date() + ) + assert ( + third_day_of_week_in_month(1, 8, 2023) == pd.Timestamp("2023-08-15").date() + ) + assert ( + third_day_of_week_in_month(1, 9, 2023) == pd.Timestamp("2023-09-19").date() + ) + assert ( + third_day_of_week_in_month(1, 10, 2023) == pd.Timestamp("2023-10-17").date() + ) + assert ( + third_day_of_week_in_month(1, 11, 2023) == pd.Timestamp("2023-11-21").date() + ) + assert ( + third_day_of_week_in_month(1, 12, 2023) == pd.Timestamp("2023-12-19").date() + ) # Wednesdays. 
- assert third_day_of_week_in_month(2, 1, 2023) == pd.Timestamp("2023-01-18").date() - assert third_day_of_week_in_month(2, 2, 2023) == pd.Timestamp("2023-02-15").date() - assert third_day_of_week_in_month(2, 3, 2023) == pd.Timestamp("2023-03-15").date() - assert third_day_of_week_in_month(2, 4, 2023) == pd.Timestamp("2023-04-19").date() - assert third_day_of_week_in_month(2, 5, 2023) == pd.Timestamp("2023-05-17").date() - assert third_day_of_week_in_month(2, 6, 2023) == pd.Timestamp("2023-06-21").date() - assert third_day_of_week_in_month(2, 7, 2023) == pd.Timestamp("2023-07-19").date() - assert third_day_of_week_in_month(2, 8, 2023) == pd.Timestamp("2023-08-16").date() - assert third_day_of_week_in_month(2, 9, 2023) == pd.Timestamp("2023-09-20").date() - assert third_day_of_week_in_month(2, 10, 2023) == pd.Timestamp("2023-10-18").date() - assert third_day_of_week_in_month(2, 11, 2023) == pd.Timestamp("2023-11-15").date() - assert third_day_of_week_in_month(2, 12, 2023) == pd.Timestamp("2023-12-20").date() + assert ( + third_day_of_week_in_month(2, 1, 2023) == pd.Timestamp("2023-01-18").date() + ) + assert ( + third_day_of_week_in_month(2, 2, 2023) == pd.Timestamp("2023-02-15").date() + ) + assert ( + third_day_of_week_in_month(2, 3, 2023) == pd.Timestamp("2023-03-15").date() + ) + assert ( + third_day_of_week_in_month(2, 4, 2023) == pd.Timestamp("2023-04-19").date() + ) + assert ( + third_day_of_week_in_month(2, 5, 2023) == pd.Timestamp("2023-05-17").date() + ) + assert ( + third_day_of_week_in_month(2, 6, 2023) == pd.Timestamp("2023-06-21").date() + ) + assert ( + third_day_of_week_in_month(2, 7, 2023) == pd.Timestamp("2023-07-19").date() + ) + assert ( + third_day_of_week_in_month(2, 8, 2023) == pd.Timestamp("2023-08-16").date() + ) + assert ( + third_day_of_week_in_month(2, 9, 2023) == pd.Timestamp("2023-09-20").date() + ) + assert ( + third_day_of_week_in_month(2, 10, 2023) == pd.Timestamp("2023-10-18").date() + ) + assert ( + third_day_of_week_in_month(2, 11, 2023) == pd.Timestamp("2023-11-15").date() + ) + assert ( + third_day_of_week_in_month(2, 12, 2023) == pd.Timestamp("2023-12-20").date() + ) # Thursdays. 
- assert third_day_of_week_in_month(3, 1, 2023) == pd.Timestamp("2023-01-19").date() - assert third_day_of_week_in_month(3, 2, 2023) == pd.Timestamp("2023-02-16").date() - assert third_day_of_week_in_month(3, 3, 2023) == pd.Timestamp("2023-03-16").date() - assert third_day_of_week_in_month(3, 4, 2023) == pd.Timestamp("2023-04-20").date() - assert third_day_of_week_in_month(3, 5, 2023) == pd.Timestamp("2023-05-18").date() - assert third_day_of_week_in_month(3, 6, 2023) == pd.Timestamp("2023-06-15").date() - assert third_day_of_week_in_month(3, 7, 2023) == pd.Timestamp("2023-07-20").date() - assert third_day_of_week_in_month(3, 8, 2023) == pd.Timestamp("2023-08-17").date() - assert third_day_of_week_in_month(3, 9, 2023) == pd.Timestamp("2023-09-21").date() - assert third_day_of_week_in_month(3, 10, 2023) == pd.Timestamp("2023-10-19").date() - assert third_day_of_week_in_month(3, 11, 2023) == pd.Timestamp("2023-11-16").date() - assert third_day_of_week_in_month(3, 12, 2023) == pd.Timestamp("2023-12-21").date() + assert ( + third_day_of_week_in_month(3, 1, 2023) == pd.Timestamp("2023-01-19").date() + ) + assert ( + third_day_of_week_in_month(3, 2, 2023) == pd.Timestamp("2023-02-16").date() + ) + assert ( + third_day_of_week_in_month(3, 3, 2023) == pd.Timestamp("2023-03-16").date() + ) + assert ( + third_day_of_week_in_month(3, 4, 2023) == pd.Timestamp("2023-04-20").date() + ) + assert ( + third_day_of_week_in_month(3, 5, 2023) == pd.Timestamp("2023-05-18").date() + ) + assert ( + third_day_of_week_in_month(3, 6, 2023) == pd.Timestamp("2023-06-15").date() + ) + assert ( + third_day_of_week_in_month(3, 7, 2023) == pd.Timestamp("2023-07-20").date() + ) + assert ( + third_day_of_week_in_month(3, 8, 2023) == pd.Timestamp("2023-08-17").date() + ) + assert ( + third_day_of_week_in_month(3, 9, 2023) == pd.Timestamp("2023-09-21").date() + ) + assert ( + third_day_of_week_in_month(3, 10, 2023) == pd.Timestamp("2023-10-19").date() + ) + assert ( + third_day_of_week_in_month(3, 11, 2023) == pd.Timestamp("2023-11-16").date() + ) + assert ( + third_day_of_week_in_month(3, 12, 2023) == pd.Timestamp("2023-12-21").date() + ) # Fridays. 
- assert third_day_of_week_in_month(4, 1, 2023) == pd.Timestamp("2023-01-20").date() - assert third_day_of_week_in_month(4, 2, 2023) == pd.Timestamp("2023-02-17").date() - assert third_day_of_week_in_month(4, 3, 2023) == pd.Timestamp("2023-03-17").date() - assert third_day_of_week_in_month(4, 4, 2023) == pd.Timestamp("2023-04-21").date() - assert third_day_of_week_in_month(4, 5, 2023) == pd.Timestamp("2023-05-19").date() - assert third_day_of_week_in_month(4, 6, 2023) == pd.Timestamp("2023-06-16").date() - assert third_day_of_week_in_month(4, 7, 2023) == pd.Timestamp("2023-07-21").date() - assert third_day_of_week_in_month(4, 8, 2023) == pd.Timestamp("2023-08-18").date() - assert third_day_of_week_in_month(4, 9, 2023) == pd.Timestamp("2023-09-15").date() - assert third_day_of_week_in_month(4, 10, 2023) == pd.Timestamp("2023-10-20").date() - assert third_day_of_week_in_month(4, 11, 2023) == pd.Timestamp("2023-11-17").date() - assert third_day_of_week_in_month(4, 12, 2023) == pd.Timestamp("2023-12-15").date() + assert ( + third_day_of_week_in_month(4, 1, 2023) == pd.Timestamp("2023-01-20").date() + ) + assert ( + third_day_of_week_in_month(4, 2, 2023) == pd.Timestamp("2023-02-17").date() + ) + assert ( + third_day_of_week_in_month(4, 3, 2023) == pd.Timestamp("2023-03-17").date() + ) + assert ( + third_day_of_week_in_month(4, 4, 2023) == pd.Timestamp("2023-04-21").date() + ) + assert ( + third_day_of_week_in_month(4, 5, 2023) == pd.Timestamp("2023-05-19").date() + ) + assert ( + third_day_of_week_in_month(4, 6, 2023) == pd.Timestamp("2023-06-16").date() + ) + assert ( + third_day_of_week_in_month(4, 7, 2023) == pd.Timestamp("2023-07-21").date() + ) + assert ( + third_day_of_week_in_month(4, 8, 2023) == pd.Timestamp("2023-08-18").date() + ) + assert ( + third_day_of_week_in_month(4, 9, 2023) == pd.Timestamp("2023-09-15").date() + ) + assert ( + third_day_of_week_in_month(4, 10, 2023) == pd.Timestamp("2023-10-20").date() + ) + assert ( + third_day_of_week_in_month(4, 11, 2023) == pd.Timestamp("2023-11-17").date() + ) + assert ( + third_day_of_week_in_month(4, 12, 2023) == pd.Timestamp("2023-12-15").date() + ) # Saturdays. 
- assert third_day_of_week_in_month(5, 1, 2023) == pd.Timestamp("2023-01-21").date() - assert third_day_of_week_in_month(5, 2, 2023) == pd.Timestamp("2023-02-18").date() - assert third_day_of_week_in_month(5, 3, 2023) == pd.Timestamp("2023-03-18").date() - assert third_day_of_week_in_month(5, 4, 2023) == pd.Timestamp("2023-04-15").date() - assert third_day_of_week_in_month(5, 5, 2023) == pd.Timestamp("2023-05-20").date() - assert third_day_of_week_in_month(5, 6, 2023) == pd.Timestamp("2023-06-17").date() - assert third_day_of_week_in_month(5, 7, 2023) == pd.Timestamp("2023-07-15").date() - assert third_day_of_week_in_month(5, 8, 2023) == pd.Timestamp("2023-08-19").date() - assert third_day_of_week_in_month(5, 9, 2023) == pd.Timestamp("2023-09-16").date() - assert third_day_of_week_in_month(5, 10, 2023) == pd.Timestamp("2023-10-21").date() - assert third_day_of_week_in_month(5, 11, 2023) == pd.Timestamp("2023-11-18").date() - assert third_day_of_week_in_month(5, 12, 2023) == pd.Timestamp("2023-12-16").date() + assert ( + third_day_of_week_in_month(5, 1, 2023) == pd.Timestamp("2023-01-21").date() + ) + assert ( + third_day_of_week_in_month(5, 2, 2023) == pd.Timestamp("2023-02-18").date() + ) + assert ( + third_day_of_week_in_month(5, 3, 2023) == pd.Timestamp("2023-03-18").date() + ) + assert ( + third_day_of_week_in_month(5, 4, 2023) == pd.Timestamp("2023-04-15").date() + ) + assert ( + third_day_of_week_in_month(5, 5, 2023) == pd.Timestamp("2023-05-20").date() + ) + assert ( + third_day_of_week_in_month(5, 6, 2023) == pd.Timestamp("2023-06-17").date() + ) + assert ( + third_day_of_week_in_month(5, 7, 2023) == pd.Timestamp("2023-07-15").date() + ) + assert ( + third_day_of_week_in_month(5, 8, 2023) == pd.Timestamp("2023-08-19").date() + ) + assert ( + third_day_of_week_in_month(5, 9, 2023) == pd.Timestamp("2023-09-16").date() + ) + assert ( + third_day_of_week_in_month(5, 10, 2023) == pd.Timestamp("2023-10-21").date() + ) + assert ( + third_day_of_week_in_month(5, 11, 2023) == pd.Timestamp("2023-11-18").date() + ) + assert ( + third_day_of_week_in_month(5, 12, 2023) == pd.Timestamp("2023-12-16").date() + ) # Sundays. 
- assert third_day_of_week_in_month(6, 1, 2023) == pd.Timestamp("2023-01-15").date() - assert third_day_of_week_in_month(6, 2, 2023) == pd.Timestamp("2023-02-19").date() - assert third_day_of_week_in_month(6, 3, 2023) == pd.Timestamp("2023-03-19").date() - assert third_day_of_week_in_month(6, 4, 2023) == pd.Timestamp("2023-04-16").date() - assert third_day_of_week_in_month(6, 5, 2023) == pd.Timestamp("2023-05-21").date() - assert third_day_of_week_in_month(6, 6, 2023) == pd.Timestamp("2023-06-18").date() - assert third_day_of_week_in_month(6, 7, 2023) == pd.Timestamp("2023-07-16").date() - assert third_day_of_week_in_month(6, 8, 2023) == pd.Timestamp("2023-08-20").date() - assert third_day_of_week_in_month(6, 9, 2023) == pd.Timestamp("2023-09-17").date() - assert third_day_of_week_in_month(6, 10, 2023) == pd.Timestamp("2023-10-15").date() - assert third_day_of_week_in_month(6, 11, 2023) == pd.Timestamp("2023-11-19").date() - assert third_day_of_week_in_month(6, 12, 2023) == pd.Timestamp("2023-12-17").date() + assert ( + third_day_of_week_in_month(6, 1, 2023) == pd.Timestamp("2023-01-15").date() + ) + assert ( + third_day_of_week_in_month(6, 2, 2023) == pd.Timestamp("2023-02-19").date() + ) + assert ( + third_day_of_week_in_month(6, 3, 2023) == pd.Timestamp("2023-03-19").date() + ) + assert ( + third_day_of_week_in_month(6, 4, 2023) == pd.Timestamp("2023-04-16").date() + ) + assert ( + third_day_of_week_in_month(6, 5, 2023) == pd.Timestamp("2023-05-21").date() + ) + assert ( + third_day_of_week_in_month(6, 6, 2023) == pd.Timestamp("2023-06-18").date() + ) + assert ( + third_day_of_week_in_month(6, 7, 2023) == pd.Timestamp("2023-07-16").date() + ) + assert ( + third_day_of_week_in_month(6, 8, 2023) == pd.Timestamp("2023-08-20").date() + ) + assert ( + third_day_of_week_in_month(6, 9, 2023) == pd.Timestamp("2023-09-17").date() + ) + assert ( + third_day_of_week_in_month(6, 10, 2023) == pd.Timestamp("2023-10-15").date() + ) + assert ( + third_day_of_week_in_month(6, 11, 2023) == pd.Timestamp("2023-11-19").date() + ) + assert ( + third_day_of_week_in_month(6, 12, 2023) == pd.Timestamp("2023-12-17").date() + ) def test_last_day_in_month(self): # Regular year. diff --git a/tests/util.py b/tests/util.py index 30a0631..31a065f 100644 --- a/tests/util.py +++ b/tests/util.py @@ -1,12 +1,10 @@ import datetime as dt -import pandas as pd - -from typing import Dict - from typing import Union +import pandas as pd + -def date2args(date: Union[dt.date, pd.Timestamp]) -> Dict[str, int]: +def date2args(date: Union[dt.date, pd.Timestamp]) -> dict[str, int]: """ Convert a date to a dictionary of arguments, including year, month and day. @@ -20,11 +18,7 @@ def date2args(date: Union[dt.date, pd.Timestamp]) -> Dict[str, int]: Dict[str, int] A dictionary of arguments. """ - return { - 'year': date.year, - 'month': date.month, - 'day': date.day - } + return {"year": date.year, "month": date.month, "day": date.day} def roll_backward(d: pd.Timestamp) -> Union[pd.Timestamp, None]: From d5fd0b97ca00d9a65c27d21dda82764233a42c20 Mon Sep 17 00:00:00 2001 From: Jens Keiner Date: Tue, 7 May 2024 14:27:57 +0200 Subject: [PATCH 16/21] Use API version from git repo. 
--- poetry.lock | 10 ++++++---- pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 98ba1b1..9d23706 100644 --- a/poetry.lock +++ b/poetry.lock @@ -154,7 +154,7 @@ description = "A package that defines parts of the API of the exchange-calendars optional = false python-versions = "~=3.9" files = [] -develop = true +develop = false [package.dependencies] pandas = "^2" @@ -162,8 +162,10 @@ pydantic = ">=2,<3" typing-extensions = ">=4.0,<5" [package.source] -type = "directory" -url = "../exchange_calendars_extensions_api" +type = "git" +url = "https://github.com/jenskeiner/exchange_calendars_extensions_api" +reference = "develop" +resolved_reference = "36d62e249ac04ba3de4b16944e1d47d4f6ba2112" [[package]] name = "filelock" @@ -759,4 +761,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = "~=3.9" -content-hash = "65c0b16d21dca55c37b7734491320c8f25aab38b6716bf0878e2175222fb0b20" +content-hash = "e94cf46e2f4bc998e3e7440d0249cb90d087a8d4ec6c6ce28c991e25d4afa93d" diff --git a/pyproject.toml b/pyproject.toml index 251d2ea..4003e30 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,7 @@ packages = [{include = "exchange_calendars_extensions"}] [tool.poetry.dependencies] python = "~=3.9" -exchange-calendars-extensions-api = {path="../exchange_calendars_extensions_api/", develop=false} # ">=0.2.0,<1.0.0" +exchange-calendars-extensions-api = { git = "https://github.com/jenskeiner/exchange_calendars_extensions_api", branch="develop" } exchange-calendars = ">=4.0.1,<5" typing-extensions = ">=4.0,<5" pydantic = ">=2.0,<3" From aff484b397191c28a5541422ae34f2db77cc68a9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 May 2024 14:08:05 +0000 Subject: [PATCH 17/21] Bump pre-commit from 3.5.0 to 3.7.0 Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 3.5.0 to 3.7.0. - [Release notes](https://github.com/pre-commit/pre-commit/releases) - [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) - [Commits](https://github.com/pre-commit/pre-commit/compare/v3.5.0...v3.7.0) --- updated-dependencies: - dependency-name: pre-commit dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 23 +++++++++++++++++------ pyproject.toml | 2 +- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9d23706..19361fe 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "annotated-types" @@ -395,13 +395,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.5.0" +version = "3.7.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, - {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, + {file = "pre_commit-3.7.0-py2.py3-none-any.whl", hash = "sha256:5eae9e10c2b5ac51577c3452ec0a490455c45a0533f7960f993a0d01e59decab"}, + {file = "pre_commit-3.7.0.tar.gz", hash = "sha256:e209d61b8acdcf742404408531f0c37d49d2c734fd7cff2d6076083d191cb060"}, ] [package.dependencies] @@ -630,6 +630,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -637,8 +638,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = 
"PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -655,6 +664,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -662,6 +672,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -761,4 +772,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = "~=3.9" -content-hash = "e94cf46e2f4bc998e3e7440d0249cb90d087a8d4ec6c6ce28c991e25d4afa93d" +content-hash = "8104bb34b2779cce481c9b6f6754bcece1f7fd0cf6985eead59c7f5b1ae4a943" diff --git a/pyproject.toml b/pyproject.toml index 4003e30..324ce93 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,7 +42,7 @@ pydantic = ">=2.0,<3" pytest = ">=7.3.1,<8.3.0" pytest-mock = ">=3.11.1,<3.15.0" pytest-cov = ">=4.1,<5.1" -pre-commit = ">=3.3.3,<3.6.0" +pre-commit = ">=3.3.3,<3.8.0" [tool.pytest.ini_options] addopts = "--cov=exchange_calendars_extensions --cov-report=term-missing" From e538c653acc20d7b578b3a6dc5f6cf21291f65f3 Mon Sep 17 00:00:00 2001 From: Jens Keiner Date: Tue, 7 May 2024 23:39:09 +0200 
Subject: [PATCH 18/21] Improve metadata documentation.

---
 docs/metadata.md | 52 ++++++++++++++++++++++++++++++++++++++++++++++++--------
 1 file changed, 44 insertions(+), 8 deletions(-)

diff --git a/docs/metadata.md b/docs/metadata.md
index 9400914..83a5aa6 100644
--- a/docs/metadata.md
+++ b/docs/metadata.md
@@ -86,14 +86,50 @@ This is a comment.
 {'tag1', 'tag2'}
 ```

-The `meta()` method supports `TimestampLike` start and end arguments which must be either both timezone-naive or
+The `meta()` method supports `TimestampLike` `start` and `end` arguments which must be either both timezone-naive or
 timezone-aware. Otherwise, a `ValueError` is raised.

-The returned dictionary includes all days with metadata that overlap with the period between the start and end
-timestamps. This definition ensures that the result is the expected even in situations where the passed in start and end
-timestamps are not aligned to midnight. In the above example, if start were `2022-01-01 06:00:00` and end were
-`2022-01-01 18:00:00`, the result would be the same since the time period that represents the full day `2022-01-01`
-overlaps with the period between start and end.
+The returned dictionary includes all days with metadata that have a non-empty intersection with the period between
+`start` and `end`. This is usually the expected result, even in situations where `start` and/or
+`end` are not aligned to midnight. In the above example, if `start` were `2022-01-01 06:00:00` and `end` were
+`2022-01-01 18:00:00`, the result would be the same since the intersection with the full day `2022-01-01` is non-empty.

-The start and end timestamps can also be timezone-aware. In this case, the time period that represents a day with
-metadata is always interpreted in the timezone of the corresponding exchange.
+When `start` and `end` are timezone-naive, as in the examples above, the timezone of the exchange does not matter. Like
+`start` and `end`, the timestamps that mark the beginning and end of a day are used timezone-naive. Effectively, all
+comparisons are between plain wall-clock times.
+
+In contrast, when `start` and `end` are timezone-aware, all other timestamps are also used timezone-aware, in the
+exchange's native timezone. Comparisons are then between instants, i.e. actual points on the timeline.
+
+The difference between the two cases is illustrated in the following example, which considers the date 2024-03-31. In
+timezones that are based on Central European Time (CET), a transition to Central European Summer Time (CEST) occurs on
+this date. The transition happens at 02:00:00 CET, which is 03:00:00 CEST, i.e. clocks advance by one hour and the day
+is 23 hours long.
+```python
+import pandas as pd
+
+import exchange_calendars_extensions.core as ecx
+from collections import OrderedDict
+from exchange_calendars_extensions.api.changes import DayMeta
+ecx.apply_extensions()
+import exchange_calendars as ec
+
+# Add metadata.
+day = pd.Timestamp("2024-03-31")
+meta = DayMeta(tags=[], comment="This is a comment")
+ecx.set_meta('XETR', day, meta)
+
+calendar = ec.get_calendar('XETR')
+
+# Get metadata for 2024-03-31, timezone-naive.
+assert calendar.meta(start='2024-03-31 00:00:00') == OrderedDict([(day, meta)])
+assert calendar.meta(start='2024-03-31 23:59:59') == OrderedDict([(day, meta)])
+
+# Get metadata for 2024-03-31, timezone-aware.
+# 2024-03-30 23:00:00 UTC is 2024-03-31 00:00:00 CET.
+assert calendar.meta(start=pd.Timestamp('2024-03-30 23:00:00').tz_localize("UTC")) == OrderedDict([(day, meta)])
+# 2024-03-31 21:59:59 UTC is 2024-03-31 23:59:59 CEST.
+assert calendar.meta(start=pd.Timestamp('2024-03-31 21:59:59').tz_localize("UTC")) == OrderedDict([(day, meta)])
+# 2024-03-31 22:00:00 UTC is 2024-04-01 00:00:00 CEST, i.e. just past the end of the day 2024-03-31.
+assert calendar.meta(start=pd.Timestamp('2024-03-31 22:00:00').tz_localize("UTC")) == OrderedDict([])
+```

From 6da146b7551054d4b03e64b174211579dc599968 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 8 May 2024 14:22:00 +0000
Subject: [PATCH 19/21] Bump exchange-calendars from 4.5.3 to 4.5.4

Bumps [exchange-calendars](https://github.com/gerrymanoim/exchange_calendars) from 4.5.3 to 4.5.4.
- [Release notes](https://github.com/gerrymanoim/exchange_calendars/releases)
- [Changelog](https://github.com/gerrymanoim/exchange_calendars/blob/master/docs/changes_archive.md)
- [Commits](https://github.com/gerrymanoim/exchange_calendars/compare/4.5.3...4.5.4)

---
updated-dependencies:
- dependency-name: exchange-calendars
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot]
---
 poetry.lock | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 19361fe..b8dc2fd 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -127,13 +127,13 @@ test = ["pytest (>=6)"]

 [[package]]
 name = "exchange-calendars"
-version = "4.5.3"
+version = "4.5.4"
 description = "Calendars for securities exchanges"
 optional = false
 python-versions = "~=3.9"
 files = [
-    {file = "exchange_calendars-4.5.3-py3-none-any.whl", hash = "sha256:f07b8ec6056adc27813fb864d7bcbff9235fa62e9edc5c306e7c1f7e3d32d748"},
-    {file = "exchange_calendars-4.5.3.tar.gz", hash = "sha256:d4f950cfe62812fc53462379dc88e0b670128d32852d40503edf5320d3097e85"},
+    {file = "exchange_calendars-4.5.4-py3-none-any.whl", hash = "sha256:a4dc02d056540fcd0daa7a40a0bcf98df0fe62c133fac8d596e03631ecbf77ff"},
+    {file = "exchange_calendars-4.5.4.tar.gz", hash = "sha256:5b10ab503c0b36809aa49d5710951050bfd447c8feece3b126311838b63cad4a"},
 ]

 [package.dependencies]

From 8de63f149911c026da0dd5a5b2db3d394ca88afa Mon Sep 17 00:00:00 2001
From: Jens Keiner
Date: Wed, 8 May 2024 16:26:57 +0200
Subject: [PATCH 20/21] Update API dependency.
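
The core package now consumes exchange-calendars-extensions-api as a released package (">=0.4.0,<1") instead of the
develop branch of its git repository. To double-check which version of the API package a given environment actually
resolved, something like the following can be used; this is only a sketch based on the standard library's
importlib.metadata, with the distribution name taken from poetry.lock:

```python
from importlib.metadata import PackageNotFoundError, version

# Report the version of the API distribution that is actually installed, e.g. "0.4.0".
try:
    print(version("exchange-calendars-extensions-api"))
except PackageNotFoundError:
    print("exchange-calendars-extensions-api is not installed")
```

With the lock file below, this should report 0.4.0 rather than the placeholder version 0 recorded for the earlier
git checkout of the develop branch.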
--- poetry.lock | 26 +++++++++++--------------- pyproject.toml | 2 +- 2 files changed, 12 insertions(+), 16 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9d23706..65dee00 100644 --- a/poetry.lock +++ b/poetry.lock @@ -127,13 +127,13 @@ test = ["pytest (>=6)"] [[package]] name = "exchange-calendars" -version = "4.5.3" +version = "4.5.4" description = "Calendars for securities exchanges" optional = false python-versions = "~=3.9" files = [ - {file = "exchange_calendars-4.5.3-py3-none-any.whl", hash = "sha256:f07b8ec6056adc27813fb864d7bcbff9235fa62e9edc5c306e7c1f7e3d32d748"}, - {file = "exchange_calendars-4.5.3.tar.gz", hash = "sha256:d4f950cfe62812fc53462379dc88e0b670128d32852d40503edf5320d3097e85"}, + {file = "exchange_calendars-4.5.4-py3-none-any.whl", hash = "sha256:a4dc02d056540fcd0daa7a40a0bcf98df0fe62c133fac8d596e03631ecbf77ff"}, + {file = "exchange_calendars-4.5.4.tar.gz", hash = "sha256:5b10ab503c0b36809aa49d5710951050bfd447c8feece3b126311838b63cad4a"}, ] [package.dependencies] @@ -149,24 +149,20 @@ dev = ["flake8", "hypothesis", "pip-tools", "pytest", "pytest-benchmark", "pytes [[package]] name = "exchange-calendars-extensions-api" -version = "0" +version = "0.4.0" description = "A package that defines parts of the API of the exchange-calendars-extensions package." optional = false -python-versions = "~=3.9" -files = [] -develop = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "exchange_calendars_extensions_api-0.4.0-py3-none-any.whl", hash = "sha256:f3e03fc2d62006901f0f7434a160e433a08ecd8049fd2d690e3849843e2c8ee4"}, + {file = "exchange_calendars_extensions_api-0.4.0.tar.gz", hash = "sha256:b0d66996d032938a56d31a08d075273c13045561e7a5f11e0ffd0390a6f6ca35"}, +] [package.dependencies] -pandas = "^2" +pandas = ">=2,<3" pydantic = ">=2,<3" typing-extensions = ">=4.0,<5" -[package.source] -type = "git" -url = "https://github.com/jenskeiner/exchange_calendars_extensions_api" -reference = "develop" -resolved_reference = "36d62e249ac04ba3de4b16944e1d47d4f6ba2112" - [[package]] name = "filelock" version = "3.14.0" @@ -761,4 +757,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = "~=3.9" -content-hash = "e94cf46e2f4bc998e3e7440d0249cb90d087a8d4ec6c6ce28c991e25d4afa93d" +content-hash = "d25c8d96c0969b70b46d920187d5fb73868a51bba7ddf5c4aca23dc1c2ad522e" diff --git a/pyproject.toml b/pyproject.toml index 4003e30..34fa65d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,7 @@ packages = [{include = "exchange_calendars_extensions"}] [tool.poetry.dependencies] python = "~=3.9" -exchange-calendars-extensions-api = { git = "https://github.com/jenskeiner/exchange_calendars_extensions_api", branch="develop" } +exchange-calendars-extensions-api = ">=0.4.0,<1" exchange-calendars = ">=4.0.1,<5" typing-extensions = ">=4.0,<5" pydantic = ">=2.0,<3" From 6c685482a8ebb7c52ce13314b43dc46fc5cbcb5a Mon Sep 17 00:00:00 2001 From: Jens Keiner Date: Wed, 8 May 2024 16:30:17 +0200 Subject: [PATCH 21/21] Regenerating lock file. --- poetry.lock | 33 +++++++++------------------------ 1 file changed, 9 insertions(+), 24 deletions(-) diff --git a/poetry.lock b/poetry.lock index b8dc2fd..4d0ba9d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -149,24 +149,20 @@ dev = ["flake8", "hypothesis", "pip-tools", "pytest", "pytest-benchmark", "pytes [[package]] name = "exchange-calendars-extensions-api" -version = "0" +version = "0.4.0" description = "A package that defines parts of the API of the exchange-calendars-extensions package." optional = false -python-versions = "~=3.9" -files = [] -develop = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "exchange_calendars_extensions_api-0.4.0-py3-none-any.whl", hash = "sha256:f3e03fc2d62006901f0f7434a160e433a08ecd8049fd2d690e3849843e2c8ee4"}, + {file = "exchange_calendars_extensions_api-0.4.0.tar.gz", hash = "sha256:b0d66996d032938a56d31a08d075273c13045561e7a5f11e0ffd0390a6f6ca35"}, +] [package.dependencies] -pandas = "^2" +pandas = ">=2,<3" pydantic = ">=2,<3" typing-extensions = ">=4.0,<5" -[package.source] -type = "git" -url = "https://github.com/jenskeiner/exchange_calendars_extensions_api" -reference = "develop" -resolved_reference = "36d62e249ac04ba3de4b16944e1d47d4f6ba2112" - [[package]] name = "filelock" version = "3.14.0" @@ -630,7 +626,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -638,16 +633,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -664,7 +651,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -672,7 +658,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -772,4 +757,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = "~=3.9" -content-hash = "8104bb34b2779cce481c9b6f6754bcece1f7fd0cf6985eead59c7f5b1ae4a943" +content-hash = "9e7a11a754cfa73fc70b9d70f6050d80d4cf21a8168e0ac2e6a262e7a49d51cb"