From 74845190eca0f9d755e51fc966de1cfd13a2569b Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Mon, 14 Aug 2023 20:43:09 +1200 Subject: [PATCH 01/31] Convert to using msgspec instead of orjson --- CHANGELOG.md | 3 +++ pylintrc | 2 +- setup.py | 2 +- singer/catalog.py | 4 ++-- singer/messages.py | 35 +++++++++++++++++++++++++---------- singer/metrics.py | 6 +++--- singer/schema.py | 4 ++-- singer/utils.py | 4 ++-- tests/test_singer.py | 8 ++++---- 9 files changed, 43 insertions(+), 25 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e0064b8..7f5806b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 3.0.0 (2023-08-23) + * Using msgspec instead of orjson or other serializers for speed benefit + ## 2.0.2 (2022-03-23) * Using orjson instead of simplejson or other serializers for speed benefit * Fix: Output decimal.Decimal as int or float not str diff --git a/pylintrc b/pylintrc index 35ad045..b566aa3 100644 --- a/pylintrc +++ b/pylintrc @@ -322,7 +322,7 @@ ignore-mixin-members=yes # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis. It # supports qualified module names, as well as Unix pattern matching. -ignored-modules=orjson +ignored-modules=msgspec # List of classes names for which member attributes should not be checked # (useful for classes with attributes dynamically set). This supports can work diff --git a/setup.py b/setup.py index db1ed32..6f4d553 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ install_requires=[ 'pytz', 'jsonschema==3.2.0', - 'orjson==3.8.0', + 'msgspec>=0.18.0', 'python-dateutil>=2.6.0', 'backoff==2.1.2', 'ciso8601', diff --git a/singer/catalog.py b/singer/catalog.py index 77424a9..33732ea 100644 --- a/singer/catalog.py +++ b/singer/catalog.py @@ -1,5 +1,5 @@ '''Provides an object model for a Singer Catalog.''' -import orjson +import msgspec import sys from . import metadata as metadata_module @@ -15,7 +15,7 @@ def write_catalog(catalog): if not catalog.streams: LOGGER.warning('Catalog being written with no streams.') - catalog_json = orjson.dumps(catalog.to_dict(), option=orjson.OPT_INDENT_2) + catalog_json = msgspec.json.format(msgspec.json.encode(catalog.to_dict())) sys.stdout.buffer.write(catalog_json) sys.stdout.buffer.flush() diff --git a/singer/messages.py b/singer/messages.py index e7b9642..b352573 100644 --- a/singer/messages.py +++ b/singer/messages.py @@ -1,7 +1,7 @@ import sys import pytz -import orjson +import msgspec import decimal import ciso8601 @@ -9,6 +9,9 @@ from .logger import get_logger LOGGER = get_logger() +# Message buffer for msgspec +msg_buffer = bytearray(64) + class Message(): '''Base class for messages.''' @@ -240,7 +243,7 @@ def parse_message(msg): # lossy conversions. However, this will affect # very few data points and we have chosen to # leave conversion as is for now. - obj = orjson.loads(msg) + obj = msgspec.json.decode(msg) msg_type = _required_key(obj, 'type') if msg_type == 'RECORD': @@ -292,16 +295,28 @@ def parse_message(msg): return None -def format_message(message, option=0): - def default(obj): - if isinstance(obj, decimal.Decimal): - return int(obj) if float(obj).is_integer() else float(obj) - raise TypeError - - return orjson.dumps(message.asdict(), option=option, default=default) +def format_message(message: Message, option=0) -> bytes: + """Format a message as a JSON string. + + Args: + message: The message to format. 
+ option: 0 = json message + 1 = json message with newline + + Returns: + The formatted message. + """ + if option==0: + return msgspec.encode(message.to_dict()) + elif option==1: + msgspec.encode_into(message.to_dict(), msg_buffer) + msg_buffer.extend(b"\n") + return msg_buffer + else: + raise Exception('Not implemented: 0=Standard, 1=Message with newline') def write_message(message): - sys.stdout.buffer.write(format_message(message, option=orjson.OPT_APPEND_NEWLINE)) + sys.stdout.buffer.write(format_message(message, option=1)) sys.stdout.buffer.flush() diff --git a/singer/metrics.py b/singer/metrics.py index 93d6e64..2fd1d4e 100644 --- a/singer/metrics.py +++ b/singer/metrics.py @@ -40,7 +40,7 @@ ''' -import orjson +import msgspec import re import time from collections import namedtuple @@ -84,7 +84,7 @@ def log(logger, point): 'value': point.value, 'tags': point.tags } - logger.info('METRIC: %s', orjson.dumps(result)) + logger.info('METRIC: %s', msgspec.json.decode(result)) class Counter(): @@ -237,7 +237,7 @@ def parse(line): if match: json_str = match.group(1) try: - raw = orjson.loads(json_str) + raw = msgspec.json.encode(json_str) return Point( metric_type=raw.get('type'), metric=raw.get('metric'), diff --git a/singer/schema.py b/singer/schema.py index 108f50f..0d2c66b 100644 --- a/singer/schema.py +++ b/singer/schema.py @@ -1,7 +1,7 @@ # pylint: disable=redefined-builtin, too-many-arguments, invalid-name '''Provides an object model for JSON Schema''' -import orjson +import msgspec # These are standard keys defined in the JSON Schema spec STANDARD_KEYS = [ @@ -57,7 +57,7 @@ def __init__(self, type=None, format=None, properties=None, items=None, self.patternProperties = patternProperties def __str__(self): - return orjson.dumps(self.to_dict()).decode('utf-8') + return msgspec.json.encode(self.to_dict()).decode('utf-8') def __repr__(self): pairs = [k + '=' + repr(v) for k, v in self.__dict__.items()] diff --git a/singer/utils.py b/singer/utils.py index 7579280..d4a734b 100644 --- a/singer/utils.py +++ b/singer/utils.py @@ -2,7 +2,7 @@ import collections import datetime import functools -import orjson +import msgspec import time from warnings import warn @@ -106,7 +106,7 @@ def chunk(array, num): def load_json(path): with open(path, encoding='utf-8') as fil: - return orjson.loads(fil.read()) + return msgspec.json.decode(fil.read()) def update_state(state, entity, dtime): diff --git a/tests/test_singer.py b/tests/test_singer.py index dd72d4a..9827321 100644 --- a/tests/test_singer.py +++ b/tests/test_singer.py @@ -1,5 +1,5 @@ import singer -import orjson +import msgspec import unittest import dateutil @@ -164,12 +164,12 @@ def test_parse_small_decimal(self): def test_parse_absurdly_large_decimal(self): value_str = '9' * 1024 + '.' 
+ '9' * 1024 - with self.assertRaises(orjson.JSONDecodeError): + with self.assertRaises(msgspec.JSONDecodeError): self.create_record(value_str) def test_parse_absurdly_large_int(self): value_str = '9' * 1024 - with self.assertRaises(orjson.JSONDecodeError): + with self.assertRaises(msgspec.JSONDecodeError): self.create_record(value_str) def test_parse_bulk_decs(self): @@ -204,7 +204,7 @@ def test_format_message(self): singer.format_message(record_message, option=0)) self.assertEqual(b'{"type":"RECORD","stream":"users","record":{"name":"foo"}}\n', - singer.format_message(record_message, option=orjson.OPT_APPEND_NEWLINE)) + singer.format_message(record_message, option=1)) if __name__ == '__main__': From b8cec32b05abff8361e854f8a7d032f2d5ad6921 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Tue, 15 Aug 2023 16:08:43 +1200 Subject: [PATCH 02/31] Working output with Singer Decimal enabled. --- singer/catalog.py | 2 +- singer/messages.py | 10 +++++++--- singer/metrics.py | 4 ++-- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/singer/catalog.py b/singer/catalog.py index 33732ea..afaa6ad 100644 --- a/singer/catalog.py +++ b/singer/catalog.py @@ -95,7 +95,7 @@ def __eq__(self, other): @classmethod def load(cls, filename): with open(filename, encoding='utf-8') as fp: # pylint: disable=invalid-name - return Catalog.from_dict(orjson.loads(fp.read())) + return Catalog.from_dict(msgspec.json.decode(fp.read())) @classmethod def from_dict(cls, data): diff --git a/singer/messages.py b/singer/messages.py index b352573..d5e5d15 100644 --- a/singer/messages.py +++ b/singer/messages.py @@ -9,6 +9,9 @@ from .logger import get_logger LOGGER = get_logger() +# Set JSON Serializer +encoder = msgspec.json.Encoder() + # Message buffer for msgspec msg_buffer = bytearray(64) @@ -295,7 +298,7 @@ def parse_message(msg): return None -def format_message(message: Message, option=0) -> bytes: +def format_message(message, option=0): """Format a message as a JSON string. Args: @@ -306,10 +309,11 @@ def format_message(message: Message, option=0) -> bytes: Returns: The formatted message. 
""" + if option==0: - return msgspec.encode(message.to_dict()) + return encoder.encode(message.asdict()) elif option==1: - msgspec.encode_into(message.to_dict(), msg_buffer) + encoder.encode_into(message.asdict(), msg_buffer) msg_buffer.extend(b"\n") return msg_buffer else: diff --git a/singer/metrics.py b/singer/metrics.py index 2fd1d4e..785b29e 100644 --- a/singer/metrics.py +++ b/singer/metrics.py @@ -84,7 +84,7 @@ def log(logger, point): 'value': point.value, 'tags': point.tags } - logger.info('METRIC: %s', msgspec.json.decode(result)) + logger.info('METRIC: %s', msgspec.json.encode(result)) class Counter(): @@ -237,7 +237,7 @@ def parse(line): if match: json_str = match.group(1) try: - raw = msgspec.json.encode(json_str) + raw = msgspec.json.decode(json_str) return Point( metric_type=raw.get('type'), metric=raw.get('metric'), From f898d0a1133454a8fa4672b02f967e6d5405c7fe Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Tue, 15 Aug 2023 19:57:51 +1200 Subject: [PATCH 03/31] Supporting Singer Decimal --- README.md | 16 +++++++++++++++ singer/messages.py | 49 ++++++++++++++++++++++++++++++++++++++++++---- singer/utils.py | 35 +++++++++++++++++++++++++++++++++ 3 files changed, 96 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index f121eea..63800f8 100644 --- a/README.md +++ b/README.md @@ -48,6 +48,22 @@ singer.write_state({'my_table': 'd'}) library to define it. However, if the environment variable `LOGGING_CONF_FILE` is found and set then the **pipelinewise-singer-python** would use the path provided in the env variable as the logging configuration for the logger. +### Singer Decimal + +Enabling the use_singer_decimal = True in a tap will output **decimal** and **floats** as a string +rather than their numeric representation. + +**Optional Setting**: + +A boolean setting: when enabled `true` in the config will outputs decimal and floating point numbers as strings to avoid loss of precision and scale. +For supporting taps, there are hints in the schema message, format = "singer.decimal", and additionalProperties scale_precision dictionary providing precision and scale. For decimal data, the target can use this +information to correctly replicate decimal data without loss. For the Floats and Number data type without precision and scale it is recommended that post processing formats the datatype based on an inspection of the data because the true data size is unknown / dynamic. + +```json +{ + "use_singer_decimal": true, +} +``` License ------- diff --git a/singer/messages.py b/singer/messages.py index d5e5d15..a1aa047 100644 --- a/singer/messages.py +++ b/singer/messages.py @@ -9,8 +9,8 @@ from .logger import get_logger LOGGER = get_logger() -# Set JSON Serializer -encoder = msgspec.json.Encoder() +# A Global variable to hold the msgspec encoder. +ENCODER = None # Message buffer for msgspec msg_buffer = bytearray(64) @@ -300,6 +300,8 @@ def parse_message(msg): def format_message(message, option=0): """Format a message as a JSON string. + The msgspec encoder is cached so it is + not created for every message. Args: message: The message to format. @@ -310,16 +312,55 @@ def format_message(message, option=0): The formatted message. 
""" + if not ENCODER: + set_msgspec_encoder() + if option==0: - return encoder.encode(message.asdict()) + return ENCODER.encode(message.asdict()) elif option==1: - encoder.encode_into(message.asdict(), msg_buffer) + ENCODER.encode_into(message.asdict(), msg_buffer) msg_buffer.extend(b"\n") return msg_buffer else: raise Exception('Not implemented: 0=Standard, 1=Message with newline') + +def set_msgspec_encoder(): + """Sets a JSON serializer encoder for all encoding. + Checks whether the use_singer_decimal setting has + been enabled to output decimals in a numeric format. + + Default: Output decimals, floats in numeric format. + If use_singer_decimal = true output as strings. + + Args: + None. + + Returns: + None. + """ + + global ENCODER + use_singer_decimal = u.get_singer_decimal_setting() + + if use_singer_decimal: + ENCODER = msgspec.json.Encoder() + LOGGER.info( + 'Singer Decimal Enabled! Floats and Decimals will be output as strings' + ) + else: + ENCODER = msgspec.json.Encoder(decimal_format="number") def write_message(message): + """Writes the message to stdout. Before writing the + message it is formatted using the msgspec encoder. This + method outputs each message followed by newline. + + Args: + message: The message to be serialized. + + Returns: + None. + """ sys.stdout.buffer.write(format_message(message, option=1)) sys.stdout.buffer.flush() diff --git a/singer/utils.py b/singer/utils.py index d4a734b..b709f44 100644 --- a/singer/utils.py +++ b/singer/utils.py @@ -15,6 +15,7 @@ DATETIME_PARSE = '%Y-%m-%dT%H:%M:%SZ' DATETIME_FMT = '%04Y-%m-%dT%H:%M:%S.%fZ' DATETIME_FMT_SAFE = '%Y-%m-%dT%H:%M:%S.%fZ' +USE_SINGER_DECIMAL = False def now(): return datetime.datetime.utcnow().replace(tzinfo=pytz.UTC) @@ -180,6 +181,10 @@ def parse_args(required_config_keys): args.catalog = Catalog.load(args.catalog) check_config(args.config, required_config_keys) + + # Store the use_singer_decimal setting if available + use_singer_decimal = args.config.get('use_singer_decimal',False) + set_singer_decimal_setting(use_singer_decimal) return args @@ -302,3 +307,33 @@ def should_sync_field(inclusion, selected, default=False): # if there was no selected value, use the default return default + + +def get_singer_decimal_setting(): + """ + Returns True if use_singer_decimal config is enabled. + + When the config use_singer_decimal is False or not set, the output + of decimal and floats will be number format rather than a + string. + + Default: False + """ + + return USE_SINGER_DECIMAL + +def set_singer_decimal_setting(config_singer_decimal=False): + """ + Updates the Singer Decimal default of True if config is enabled. + + When the config use_singer_decimal is False or not set, the output + of decimal and floats will be number format rather than a + string. + + Default: False + """ + + global USE_SINGER_DECIMAL + + USE_SINGER_DECIMAL = config_singer_decimal + From 277718b25111e8847314dc6de65d721967cc5def Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Wed, 16 Aug 2023 12:25:50 +1200 Subject: [PATCH 04/31] Use a dynamic buffer --- singer/messages.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/singer/messages.py b/singer/messages.py index a1aa047..a8904b0 100644 --- a/singer/messages.py +++ b/singer/messages.py @@ -12,8 +12,10 @@ # A Global variable to hold the msgspec encoder. 
ENCODER = None -# Message buffer for msgspec -msg_buffer = bytearray(64) +# Allocate a single shared buffer Message buffer for msgspec +# This buffer will dynamically expand as required but will not shrink. +# https://jcristharif.com/msgspec/perf-tips.html +msg_buffer = bytearray() class Message(): '''Base class for messages.''' From a53df6cf3db52ce72ffc9eeba353371acd236459 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Wed, 16 Aug 2023 13:22:54 +1200 Subject: [PATCH 05/31] Fixing linting issues and removing Python3.7 support --- .github/workflows/main.yml | 2 +- ...publish.yml => pythonpublish.yml.disabled} | 0 singer/messages.py | 12 +++++------ singer/utils.py | 21 +++++++++---------- 4 files changed, 17 insertions(+), 18 deletions(-) rename .github/workflows/{pythonpublish.yml => pythonpublish.yml.disabled} (100%) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 6ec7515..2ba2a55 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -14,7 +14,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7", "3.8", "3.9", "3.10"] + python-version: ["3.8", "3.9", "3.10", "3.11"] runs-on: ubuntu-latest diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml.disabled similarity index 100% rename from .github/workflows/pythonpublish.yml rename to .github/workflows/pythonpublish.yml.disabled diff --git a/singer/messages.py b/singer/messages.py index a8904b0..186eff0 100644 --- a/singer/messages.py +++ b/singer/messages.py @@ -2,7 +2,6 @@ import pytz import msgspec -import decimal import ciso8601 import singer.utils as u @@ -325,13 +324,14 @@ def format_message(message, option=0): return msg_buffer else: raise Exception('Not implemented: 0=Standard, 1=Message with newline') - + return None + def set_msgspec_encoder(): """Sets a JSON serializer encoder for all encoding. - Checks whether the use_singer_decimal setting has + Checks whether the use_singer_decimal setting has been enabled to output decimals in a numeric format. - Default: Output decimals, floats in numeric format. + Default: Output decimals, floats in numeric format. If use_singer_decimal = true output as strings. Args: @@ -341,7 +341,7 @@ def set_msgspec_encoder(): None. """ - global ENCODER + global ENCODER # pylint: disable=W0603 use_singer_decimal = u.get_singer_decimal_setting() if use_singer_decimal: @@ -362,7 +362,7 @@ def write_message(message): Returns: None. - """ + """ sys.stdout.buffer.write(format_message(message, option=1)) sys.stdout.buffer.flush() diff --git a/singer/utils.py b/singer/utils.py index b709f44..bad6228 100644 --- a/singer/utils.py +++ b/singer/utils.py @@ -181,7 +181,7 @@ def parse_args(required_config_keys): args.catalog = Catalog.load(args.catalog) check_config(args.config, required_config_keys) - + # Store the use_singer_decimal setting if available use_singer_decimal = args.config.get('use_singer_decimal',False) set_singer_decimal_setting(use_singer_decimal) @@ -307,33 +307,32 @@ def should_sync_field(inclusion, selected, default=False): # if there was no selected value, use the default return default - + def get_singer_decimal_setting(): """ Returns True if use_singer_decimal config is enabled. - + When the config use_singer_decimal is False or not set, the output of decimal and floats will be number format rather than a string. 
- + Default: False """ - + return USE_SINGER_DECIMAL - + def set_singer_decimal_setting(config_singer_decimal=False): """ Updates the Singer Decimal default of True if config is enabled. - + When the config use_singer_decimal is False or not set, the output of decimal and floats will be number format rather than a string. - + Default: False """ - global USE_SINGER_DECIMAL - - USE_SINGER_DECIMAL = config_singer_decimal + global USE_SINGER_DECIMAL # pylint: disable=W0603 + USE_SINGER_DECIMAL = config_singer_decimal From a8b387a79fefecd74de9e1727f64110d5ef69ce4 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Wed, 16 Aug 2023 16:27:33 +1200 Subject: [PATCH 06/31] Resolving Linting issues --- setup.py | 2 +- singer/messages.py | 10 ++++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/setup.py b/setup.py index 6f4d553..559c16c 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ setup(name="pipelinewise-singer-python", version='2.0.1', description="Singer.io utility library - PipelineWise compatible", - python_requires=">=3.7.0, <3.11", + python_requires=">=3.7.0, <3.12", long_description=long_description, long_description_content_type="text/markdown", author="TransferWise", diff --git a/singer/messages.py b/singer/messages.py index 186eff0..f81e09f 100644 --- a/singer/messages.py +++ b/singer/messages.py @@ -299,6 +299,7 @@ def parse_message(msg): return None + def format_message(message, option=0): """Format a message as a JSON string. The msgspec encoder is cached so it is @@ -318,13 +319,13 @@ def format_message(message, option=0): if option==0: return ENCODER.encode(message.asdict()) - elif option==1: + if option==1: ENCODER.encode_into(message.asdict(), msg_buffer) msg_buffer.extend(b"\n") return msg_buffer - else: - raise Exception('Not implemented: 0=Standard, 1=Message with newline') - return None + + raise Exception('Not implemented: 0=Standard, 1=Message with newline') + def set_msgspec_encoder(): """Sets a JSON serializer encoder for all encoding. @@ -352,6 +353,7 @@ def set_msgspec_encoder(): else: ENCODER = msgspec.json.Encoder(decimal_format="number") + def write_message(message): """Writes the message to stdout. Before writing the message it is formatted using the msgspec encoder. 
This From d866217da01d8f898051ee6f96663bf0da895d02 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Mon, 24 Jun 2024 14:56:16 +1200 Subject: [PATCH 07/31] Bumping setup.py version for release --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 559c16c..e674460 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ long_description = fh.read() setup(name="pipelinewise-singer-python", - version='2.0.1', + version='3.0.0', description="Singer.io utility library - PipelineWise compatible", python_requires=">=3.7.0, <3.12", long_description=long_description, From 7e0cc0df7a7910ef5aa07cdf7c92b2cda6f6ef43 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Mon, 24 Jun 2024 15:55:45 +1200 Subject: [PATCH 08/31] Correcting msgspec returns for Validation Issues --- tests/test_singer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_singer.py b/tests/test_singer.py index 9827321..4d00eb1 100644 --- a/tests/test_singer.py +++ b/tests/test_singer.py @@ -164,12 +164,12 @@ def test_parse_small_decimal(self): def test_parse_absurdly_large_decimal(self): value_str = '9' * 1024 + '.' + '9' * 1024 - with self.assertRaises(msgspec.JSONDecodeError): + with self.assertRaises(msgspec.ValidationError): self.create_record(value_str) def test_parse_absurdly_large_int(self): value_str = '9' * 1024 - with self.assertRaises(msgspec.JSONDecodeError): + with self.assertRaises(msgspec.ValidationError): self.create_record(value_str) def test_parse_bulk_decs(self): From 0db019d6d156e934aa52225af76f6c9abf2752c3 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Mon, 24 Jun 2024 16:02:58 +1200 Subject: [PATCH 09/31] Correcting test for test_parse_absurdly_large_int --- tests/test_singer.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/test_singer.py b/tests/test_singer.py index 4d00eb1..a5b4309 100644 --- a/tests/test_singer.py +++ b/tests/test_singer.py @@ -169,8 +169,9 @@ def test_parse_absurdly_large_decimal(self): def test_parse_absurdly_large_int(self): value_str = '9' * 1024 - with self.assertRaises(msgspec.ValidationError): - self.create_record(value_str) + value = self.create_record(value_str) + self.assertEqual(int(value_str), value) + self.assertEqual(int, type(value)) def test_parse_bulk_decs(self): value_strs = [ From cf9f0312abc88a3511d3de7c7404b3a54c0cd76d Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Thu, 27 Jun 2024 18:38:07 +1200 Subject: [PATCH 10/31] Correcting pylint and pytests for msgspec serialisation (#18) * Correcting pylint and pytests * Reducing coverage failure settings * Supporting Python 3.12, deprecating 3.7 * Supporting Python 3.12, deprecating 3.7 * Support for JSONSchema 4 * Correcting linting issues * Changing action versions * Unique coverage data file * Combine all reports * Combine all reports fix * Combine all reports fix v2 * Experimenting with coverage comments * Adding Markdown for publishing * Adding Permissions * Tweaking Coverage Report * Correcting Singer Tests --------- Co-authored-by: Steve Clarke --- .github/workflows/main.yml | 48 +- CHANGELOG.md | 2 + pylintrc | 911 +++++++++++++++++++++++-------------- setup.py | 18 +- singer/messages.py | 4 +- singer/schema.py | 21 +- singer/transform.py | 25 +- tests/test_catalog.py | 6 +- tests/test_schema.py | 28 +- tests/test_singer.py 
| 13 +- 10 files changed, 680 insertions(+), 396 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 2ba2a55..f2d7707 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -14,13 +14,13 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] runs-on: ubuntu-latest steps: - name: Checking out repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 @@ -39,17 +39,21 @@ jobs: run: coverage run --parallel -m pytest - name: Upload coverage data - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: coverage-data + name: coverage-data-${{ matrix.python-version }} path: ".coverage.*" coverage: runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + repository-projects: write needs: build steps: - name: Check out the repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 @@ -62,9 +66,10 @@ jobs: python3 -m pip install -U .[dev] - name: Download coverage data - uses: actions/download-artifact@v3.0.0 + uses: actions/download-artifact@v4 with: - name: coverage-data + pattern: coverage-data-* + merge-multiple: true - name: Combine coverage data run: | @@ -72,12 +77,35 @@ jobs: - name: Generate XML coverage report run: | - coverage xml + coverage xml --fail-under=75 - name: Display human readable report run: | - coverage report + coverage report --fail-under=75 + + - name: Code Coverage Summary Report + uses: irongut/CodeCoverageSummary@v1.2.0 + with: + filename: coverage.xml + badge: true + fail_below_min: true + format: markdown + hide_branch_rate: false + hide_complexity: true + indicators: true + output: both + thresholds: '60 80' + + - name: Add Coverage PR Comment + uses: marocchino/sticky-pull-request-comment@v2 + if: github.event_name == 'pull_request' + with: + recreate: true + path: code-coverage-results.md + + - name: Write to Job Summary + run: cat code-coverage-results.md >> $GITHUB_STEP_SUMMARY # Optional if you want to use codecov.io # - name: Upload coverage report - # uses: codecov/codecov-action@v3 + # uses: codecov/codecov-action@v4 diff --git a/CHANGELOG.md b/CHANGELOG.md index 7f5806b..5a9fbad 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ ## 3.0.0 (2023-08-23) * Using msgspec instead of orjson or other serializers for speed benefit + * Support for Python 3.12 + * Deprecating Python 3.7 ## 2.0.2 (2022-03-23) * Using orjson instead of simplejson or other serializers for speed benefit diff --git a/pylintrc b/pylintrc index b566aa3..1bfc6d2 100644 --- a/pylintrc +++ b/pylintrc @@ -1,71 +1,463 @@ -# Based on Apache 2.0 licensed code from https://github.com/ClusterHQ/flocker +[MAIN] -[MASTER] +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no -# Specify a configuration file. -#rcfile= +# Clear in-memory caches upon conclusion of linting. Useful if running pylint +# in a server-like mode. +clear-cache-post-run=no + +# Load and enable all available extensions. Use --list-extensions to see a list +# all available extensions. 
+#enable-all-extensions= + +# In error mode, messages with a category besides ERROR or FATAL are +# suppressed, and no reports are done by default. Error mode is compatible with +# disabling specific errors. +#errors-only= + +# Always return a 0 (non-error) status code, even if lint errors are found. +# This is primarily useful in continuous integration scripts. +#exit-zero= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-allow-list= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) +extension-pkg-whitelist=ujson + +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= + +# Specify a score threshold under which the program will exit with error. +fail-under=10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +#from-stdin= + +# Files or directories to be skipped. They should be base names, not paths. +ignore= + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, +# it can't be used as an escape character. +ignore-paths= + +# Files or directories matching the regular expression patterns are skipped. +# The regex matches against base names, not paths. The default value ignores +# Emacs file locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked and +# will not be imported (useful for modules/projects where namespaces are +# manipulated during runtime and thus existing member attributes cannot be +# deduced by static analysis). It supports qualified module names, as well as +# Unix pattern matching. +ignored-modules=msgspec # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). -# init-hook= +#init-hook= -# Add files or directories to the blacklist. They should be base names, not paths. -ignore= +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins=pylint.extensions.no_self_use # Pickle collected data for later comparisons. persistent=no -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= +# Resolve imports to .pyi stubs if available. May reduce no-member messages and +# increase not-an-iterable messages. +prefer-stubs=no -# Use multiple processes to speed up Pylint. -# DO NOT CHANGE THIS VALUES >1 HIDE RESULTS!!!!! 
-jobs=1 +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.8 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code -extension-pkg-whitelist=ujson +# In verbose mode, extra non-checker-related info will be displayed. +#verbose= + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. If left empty, argument names will be checked with the set +# naming style. +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. If left empty, class names will be checked with the set naming style. +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming +# style. +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. If left empty, function names will be checked with the set +# naming style. 
+function-rgx=[a-z_][a-z0-9_]{2,40}$ + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _ + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +method-rgx=[a-z_][a-z0-9_]{2,80}$ + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Regular expression matching correct type alias names. If left empty, type +# alias names will be checked with the set naming style. +#typealias-rgx= + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. If left empty, variable names will be checked with the set +# naming style. +variable-rgx=[a-z_][a-z0-9_]{2,30}$ + + +[CLASSES] + +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +exclude-too-few-public-methods= + +# List of qualified class names to ignore when counting class parents (see +# R0901) +ignored-parents= + +# Maximum number of arguments for function / method. +max-args=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement (see R0916). 
+max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 -# Allow optimization of some AST trees. This will activate a peephole AST -# optimizer, which will apply various small optimizations. For instance, it can -# be used to obtain the result of joining multiple strings with the addition -# operator. Joining a lot of strings can lead to a maximum recursion error in -# Pylint and this flag can prevent that. It has one side effect, the resulting -# AST will be different than the one from reality. -optimize-ast=no +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when caught. +overgeneral-exceptions=Exception + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=120 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow explicit reexports by alias from a package __init__. +allow-reexport-from-package=no + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules=regsub, + TERMIOS, + Bastion, + rexec + +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). +import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. 
+logging-modules=logging [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED -confidence= - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time. See also the "--disable" option for examples. -disable=wrong-import-order, - broad-except, +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-implicit-booleaness-not-comparison-to-string, + use-implicit-booleaness-not-comparison-to-zero, + use-symbolic-message-instead, + wrong-import-order, + broad-exception-caught, missing-module-docstring, - duplicate-code, # not useful until a major code refactoring - + duplicate-code, c-extension-no-member, missing-class-docstring, missing-function-docstring, - noo-else-return, too-few-public-methods, too-many-arguments, too-many-branches, too-many-return-statements, - protected-access, - + protected-access +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. 
enable=import-error, import-self, reimported, @@ -78,7 +470,6 @@ enable=import-error, used-before-assignment, cell-var-from-loop, global-variable-undefined, - redefine-in-handler, unused-import, unused-wildcard-import, global-variable-not-assigned, @@ -87,7 +478,6 @@ enable=import-error, global-at-module-level, bad-open-mode, redundant-unittest-assert, - boolean-datetime deprecated-method, anomalous-unicode-escape-in-string, anomalous-backslash-in-string, @@ -111,7 +501,6 @@ enable=import-error, assert-on-tuple, dangerous-default-value, duplicate-key, - useless-else-on-loop expression-not-assigned, confusing-with-statement, unnecessary-lambda, @@ -123,16 +512,11 @@ enable=import-error, exec-used, using-constant-test, bad-super-call, - missing-super-argument, - slots-on-old-class, - super-on-old-class, - property-on-old-class, not-an-iterable, not-a-mapping, format-needs-mapping, truncated-format-string, missing-format-string-key, - mixed-format-string, too-few-format-args, bad-str-strip-call, too-many-format-args, @@ -142,36 +526,10 @@ enable=import-error, bad-format-string, missing-format-attribute, missing-format-argument-key, - unused-format-string-argument unused-format-string-key, invalid-format-index, bad-indentation, - mixed-indentation, unnecessary-semicolon, - lowercase-l-suffix, - invalid-encoded-data, - unpacking-in-except, - import-star-module-level, - long-suffix, - old-octal-literal, - old-ne-operator, - backtick, - old-raise-syntax, - metaclass-assignment, - next-method-called, - dict-iter-method, - dict-view-method, - indexing-exception, - raising-string, - using-cmp-argument, - cmp-method, - coerce-method, - delslice-method, - getslice-method, - hex-method, - nonzero-method, - t-method, - setslice-method, logging-format-truncated, logging-too-few-args, logging-too-many-args, @@ -210,353 +568,212 @@ enable=import-error, non-parent-init-called, bad-except-order, catching-non-exception, - bad-exception-context, + bad-exception-cause, notimplemented-raised, raising-bad-type, raising-non-exception, misplaced-bare-raise, duplicate-except, - nonstandard-exception, binary-op-exception, bare-except, not-async-context-manager, yield-inside-async-function -# Needs investigation: -# abstract-method (might be indicating a bug? probably not though) -# protected-access (requires some refactoring) -# attribute-defined-outside-init (requires some refactoring) -# super-init-not-called (requires some cleanup) - -# Things we'd like to enable someday: -# redefined-builtin (requires a bunch of work to clean up our code first) -# redefined-outer-name (requires a bunch of work to clean up our code first) -# undefined-variable (re-enable when pylint fixes https://github.com/PyCQA/pylint/issues/760) -# no-name-in-module (giving us spurious warnings https://github.com/PyCQA/pylint/issues/73) -# unused-argument (need to clean up or code a lot, e.g. prefix unused_?) -# function-redefined (@overload causes lots of spurious warnings) -# too-many-function-args (@overload causes spurious warnings... I think) -# parameter-unpacking (needed for eventual Python 3 compat) -# print-statement (needed for eventual Python 3 compat) -# filter-builtin-not-iterating (Python 3) -# map-builtin-not-iterating (Python 3) -# range-builtin-not-iterating (Python 3) -# zip-builtin-not-iterating (Python 3) -# many others relevant to Python 3 -# unused-variable (a little work to cleanup, is all) - -# ... -[REPORTS] - -# Set the output format. Available formats are text, parseable, colorized, msvs -# (visual studio) and html. 
You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=parseable - -# Put messages in a separate file for each module / package specified on the -# command line instead of printing them on stdout. Reports (if any) will be -# written in a file name "pylint_global.[txt|html]". -files-output=no - -# Tells whether to display a full report or only the messages -reports=no - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details -#msg-template= - - -[LOGGING] - -# Logging modules to check that the string format arguments are in logging -# function parameter format -logging-modules=logging - - -[FORMAT] - -# Maximum number of characters on a single line. -max-line-length=120 -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ +[METHOD_ARGS] -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma,dict-separator -# Maximum number of lines in a module -max-module-lines=1000 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' +[MISCELLANEOUS] -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= +# Regular expression of note tags to take in consideration. +notes-rgx= -[TYPECHECK] +[REFACTORING] -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules=msgspec +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. 
+never-returning-functions=sys.exit,argparse.parse_error -# List of classes names for which member attributes should not be checked -# (useful for classes with attributes dynamically set). This supports can work -# with qualified names. -ignored-classes= +# Let 'consider-using-join' be raised when the separator to join on would be +# non-empty (resulting in expected fixes of the type: ``"- " + " - +# ".join(items)``) +suggest-join-with-non-empty-separator=yes -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= +[REPORTS] -[VARIABLES] +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) -# Tells whether we should check for unused import in __init__ files. -init-import=no +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= -# A regular expression matching the name of dummy variables (i.e. expectedly -# not used). -dummy-variables-rgx=_$|dummy +# Set the output format. Available formats are: text, parseable, colorized, +# json2 (improved json format), json (old json format) and msvs (visual +# studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins= +# Tells whether to display a full report or only the messages. +reports=no -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_,_cb +# Activate the evaluation score. +score=yes [SIMILARITIES] -# Minimum lines number of a similarity. -min-similarity-lines=4 - -# Ignore comments when computing similarities. +# Comments are removed from the similarity computation ignore-comments=yes -# Ignore docstrings when computing similarities. +# Docstrings are removed from the similarity computation ignore-docstrings=yes -# Ignore imports when computing similarities. +# Imports are removed from the similarity computation ignore-imports=no +# Signatures are removed from the similarity computation +ignore-signatures=yes + +# Minimum lines number of a similarity. +min-similarity-lines=4 + [SPELLING] -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package. +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work. spelling-dict= +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + # List of comma separated words that should not be checked. 
spelling-ignore-words= -# A path to a file that contains private dictionary; one word per line. +# A path to a file that contains the private dictionary; one word per line. spelling-private-dict-file= -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. spelling-store-unknown-words=no -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME,XXX - - -[BASIC] - -# List of builtins function names that should not be used, separated by a comma -bad-functions=map,filter,input - -# Good variable names which should always be accepted, separated by a comma -good-names=i,j,k,ex,Run,_ - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo,bar,baz,toto,tutu,tata - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Include a hint for the correct naming format with invalid-name -include-naming-hint=no - -# Regular expression matching correct function names -function-rgx=[a-z_][a-z0-9_]{2,40}$ - -# Naming hint for function names -function-name-hint=[a-z_][a-z0-9_]{2,40}$ - -# Regular expression matching correct variable names -variable-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Naming hint for variable names -variable-name-hint=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression matching correct constant names -const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Naming hint for constant names -const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Regular expression matching correct attribute names -attr-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Naming hint for attribute names -attr-name-hint=[a-z_][a-z0-9_]{2,30}$ +[STRING] -# Regular expression matching correct argument names -argument-rgx=[a-z_][a-z0-9_]{2,30}$ +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no -# Naming hint for argument names -argument-name-hint=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression matching correct class attribute names -class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - -# Naming hint for class attribute names -class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - -# Regular expression matching correct inline iteration names -inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ - -# Naming hint for inline iteration names -inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ - -# Regular expression matching correct class names -class-rgx=[A-Z_][a-zA-Z0-9]+$ - -# Naming hint for class names -class-name-hint=[A-Z_][a-zA-Z0-9]+$ - -# Regular expression matching correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no -# Naming hint for module names -module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ -# Regular expression matching correct method names -method-rgx=[a-z_][a-z0-9_]{2,80}$ +[TYPECHECK] -# Naming hint for method names -method-name-hint=[a-z_][a-z0-9_]{2,80}$ +# List of decorators that produce context managers, such as +# contextlib.contextmanager. 
Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes= +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes -[ELIF] +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx=.*[Mm]ixin -[IMPORTS] +# List of decorators that change the signature of a decorated function. +signature-mutators= -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub,TERMIOS,Bastion,rexec -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= +[VARIABLES] -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes +# List of names allowed to shadow builtins +allowed-redefined-builtins= -[DESIGN] +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb -# Maximum number of arguments for function / method -max-args=7 +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). 
+dummy-variables-rgx=_$|dummy -# Argument names that match this expression will be ignored. Default to name -# with leading underscore +# Argument names that match this expression will be ignored. ignored-argument-names=_.* -# Maximum number of locals for function / method body -max-locals=15 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of branch for function / method body -max-branches=12 - -# Maximum number of statements in function / method body -max-statements=50 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of boolean expressions in a if statement -max-bool-expr=5 - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__,__new__,setUp - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict,_fields,_replace,_source,_make - - -[EXCEPTIONS] +# Tells whether we should check for unused import in __init__ files. +init-import=no -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=Exception +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io diff --git a/setup.py b/setup.py index e674460..1f0c124 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ setup(name="pipelinewise-singer-python", version='3.0.0', description="Singer.io utility library - PipelineWise compatible", - python_requires=">=3.7.0, <3.12", + python_requires=">=3.8.0, <3.13", long_description=long_description, long_description_content_type="text/markdown", author="TransferWise", @@ -18,18 +18,18 @@ ], url="https://github.com/transferwise/pipelinewise-singer-python", install_requires=[ - 'pytz', - 'jsonschema==3.2.0', + 'pytz>=2018.4', + 'jsonschema>=4.19.2,==4.*', 'msgspec>=0.18.0', - 'python-dateutil>=2.6.0', - 'backoff==2.1.2', - 'ciso8601', + 'python-dateutil>2.7.3,==2.*', + 'backoff>=2.2.1,==2.*', + 'ciso8601>=2.3.1,==2.*', ], extras_require={ 'dev': [ - 'pylint==2.11.1', - 'pytest==7.1.2', - 'coverage[toml]~=6.3', + 'pylint==3.0.2,==3.*', + 'pytest>=7.1.2,==7.*', + 'coverage[toml]>=6.3,<8.0', 'ipython', 'ipdb', 'unify==0.5' diff --git a/singer/messages.py b/singer/messages.py index f81e09f..0be534c 100644 --- a/singer/messages.py +++ b/singer/messages.py @@ -3,6 +3,7 @@ import pytz import msgspec import ciso8601 +import decimal import singer.utils as u from .logger import get_logger @@ -247,7 +248,8 @@ def parse_message(msg): # lossy conversions. However, this will affect # very few data points and we have chosen to # leave conversion as is for now. 
-    obj = msgspec.json.decode(msg)
+    dec = msgspec.json.Decoder(float_hook=decimal.Decimal)
+    obj = dec.decode(msg)
    msg_type = _required_key(obj, 'type')
    if msg_type == 'RECORD':
diff --git a/singer/schema.py b/singer/schema.py
index 0d2c66b..1dfe076 100644
--- a/singer/schema.py
+++ b/singer/schema.py
@@ -5,6 +5,7 @@
# These are standard keys defined in the JSON Schema spec
STANDARD_KEYS = [
+    'title',
    'selected',
    'inclusion',
    'description',
@@ -17,9 +18,16 @@
    'minLength',
    'format',
    'type',
+    'default',
+    'required',
+    'enum',
+    'pattern',
+    'contentMediaType',
+    'contentEncoding',
    'additionalProperties',
    'anyOf',
    'patternProperties',
+    'allOf',
]

@@ -32,13 +40,15 @@ class Schema():  # pylint: disable=too-many-instance-attributes
    '''
    # pylint: disable=too-many-locals
-    def __init__(self, type=None, format=None, properties=None, items=None,
+    def __init__(self, type=None, default=None, format=None, properties=None, items=None,
                 selected=None, inclusion=None, description=None, minimum=None,
                 maximum=None, exclusiveMinimum=None, exclusiveMaximum=None,
                 multipleOf=None, maxLength=None, minLength=None, additionalProperties=None,
-                 anyOf=None, patternProperties=None):
+                 anyOf=None, allOf=None, patternProperties=None, required=None, enum=None,
+                 title=None, pattern=None, contentMediaType=None, contentEncoding=None):
        self.type = type
+        self.default = default
        self.properties = properties
        self.items = items
        self.selected = selected
@@ -52,9 +62,16 @@ def __init__(self, type=None, format=None, properties=None, items=None,
        self.maxLength = maxLength
        self.minLength = minLength
        self.anyOf = anyOf
+        self.allOf = allOf
        self.format = format
        self.additionalProperties = additionalProperties
        self.patternProperties = patternProperties
+        self.required = required
+        self.enum = enum
+        self.title = title
+        self.pattern = pattern
+        self.contentMediaType = contentMediaType
+        self.contentEncoding = contentEncoding
    def __str__(self):
        return msgspec.json.encode(self.to_dict()).decode('utf-8')
diff --git a/singer/transform.py b/singer/transform.py
index f117570..7446023 100644
--- a/singer/transform.py
+++ b/singer/transform.py
@@ -1,7 +1,9 @@
import datetime
import logging
import re
-from jsonschema import RefResolver
+#from jsonschema import RefResolver
+from referencing import Registry
+from referencing.jsonschema import DRAFT202012

import singer.metadata
from singer.logger import get_logger
@@ -53,6 +55,7 @@ class SchemaKey:
    properties = 'properties'
    pattern_properties = 'patternProperties'
    any_of = 'anyOf'
+    all_of = 'allOf'
class Error:
    def __init__(self, path, data, schema=None, logging_level=logging.INFO):
@@ -351,14 +354,24 @@ def resolve_schema_references(schema, refs=None):
    Returns:
        schema
    '''
+#    refs = refs or {}
+#    return _resolve_schema_references(schema, RefResolver('', schema, store=refs))
    refs = refs or {}
-    return _resolve_schema_references(schema, RefResolver('', schema, store=refs))
+    registry: Registry = Registry()
+    schema_resource = DRAFT202012.create_resource(schema)
+    registry = registry.with_resource("", schema_resource)
+    registry = registry.with_resources(
+        [(k, DRAFT202012.create_resource(v)) for k, v in refs.items()]
+    )
+
+    resolver = registry.resolver()
+    return _resolve_schema_references(schema, resolver)
def _resolve_schema_references(schema, resolver):
    if SchemaKey.ref in schema:
        reference_path = schema.pop(SchemaKey.ref, None)
-        resolved = resolver.resolve(reference_path)[1]
-        schema.update(resolved)
+        resolved = resolver.lookup(reference_path)
+        schema.update(resolved.contents)
        return
_resolve_schema_references(schema, resolver) if SchemaKey.properties in schema: @@ -376,4 +389,8 @@ def _resolve_schema_references(schema, resolver): for i, element in enumerate(schema[SchemaKey.any_of]): schema[SchemaKey.any_of][i] = _resolve_schema_references(element, resolver) + if SchemaKey.all_of in schema: + for i, element in enumerate(schema[SchemaKey.all_of]): + schema[SchemaKey.all_of][i] = _resolve_schema_references(element, resolver) + return schema diff --git a/tests/test_catalog.py b/tests/test_catalog.py index cd6dc50..8a72e1a 100644 --- a/tests/test_catalog.py +++ b/tests/test_catalog.py @@ -25,7 +25,7 @@ def test_one_selected_stream(self): CatalogEntry(tap_stream_id='c',schema=Schema(),metadata=[])]) state = {} selected_streams = catalog.get_selected_streams(state) - self.assertEquals([e for e in selected_streams],[selected_entry]) + self.assertEqual([e for e in selected_streams],[selected_entry]) def test_resumes_currently_syncing_stream(self): selected_entry_a = CatalogEntry(tap_stream_id='a', @@ -44,7 +44,7 @@ def test_resumes_currently_syncing_stream(self): selected_entry_c]) state = {'currently_syncing': 'c'} selected_streams = catalog.get_selected_streams(state) - self.assertEquals([e for e in selected_streams][0],selected_entry_c) + self.assertEqual([e for e in selected_streams][0],selected_entry_c) class TestToDictAndFromDict(unittest.TestCase): @@ -141,4 +141,4 @@ def test(self): CatalogEntry(tap_stream_id='b'), CatalogEntry(tap_stream_id='c')]) entry = catalog.get_stream('b') - self.assertEquals('b', entry.tap_stream_id) + self.assertEqual('b', entry.tap_stream_id) diff --git a/tests/test_schema.py b/tests/test_schema.py index bf9edc6..abf7abd 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -44,41 +44,41 @@ class TestSchema(unittest.TestCase): additionalProperties=True) def test_to_string(self): - self.assertEquals('{"maxLength":32,"type":"string"}', str(self.string_obj)) + self.assertEqual('{"maxLength":32,"type":"string"}', str(self.string_obj)) def test_string_to_dict(self): - self.assertEquals(self.string_dict, self.string_obj.to_dict()) + self.assertEqual(self.string_dict, self.string_obj.to_dict()) def test_integer_to_dict(self): - self.assertEquals(self.integer_dict, self.integer_obj.to_dict()) + self.assertEqual(self.integer_dict, self.integer_obj.to_dict()) def test_array_to_dict(self): - self.assertEquals(self.array_dict, self.array_obj.to_dict()) + self.assertEqual(self.array_dict, self.array_obj.to_dict()) def test_object_to_dict(self): - self.assertEquals(self.object_dict, self.object_obj.to_dict()) + self.assertEqual(self.object_dict, self.object_obj.to_dict()) def test_string_from_dict(self): - self.assertEquals(self.string_obj, Schema.from_dict(self.string_dict)) + self.assertEqual(self.string_obj, Schema.from_dict(self.string_dict)) def test_integer_from_dict(self): - self.assertEquals(self.integer_obj, Schema.from_dict(self.integer_dict)) + self.assertEqual(self.integer_obj, Schema.from_dict(self.integer_dict)) def test_array_from_dict(self): - self.assertEquals(self.array_obj, Schema.from_dict(self.array_dict)) + self.assertEqual(self.array_obj, Schema.from_dict(self.array_dict)) def test_object_from_dict(self): - self.assertEquals(self.object_obj, Schema.from_dict(self.object_dict)) + self.assertEqual(self.object_obj, Schema.from_dict(self.object_dict)) def test_repr_atomic(self): - self.assertEquals(self.string_obj, eval(repr(self.string_obj))) + self.assertEqual(self.string_obj, eval(repr(self.string_obj))) def 
test_repr_recursive(self): - self.assertEquals(self.object_obj, eval(repr(self.object_obj))) + self.assertEqual(self.object_obj, eval(repr(self.object_obj))) def test_object_from_dict_with_defaults(self): schema = Schema.from_dict(self.object_dict, inclusion='automatic') - self.assertEquals('whatever', schema.inclusion, + self.assertEqual('whatever', schema.inclusion, msg='The schema value should override the default') - self.assertEquals('automatic', schema.properties['a_string'].inclusion) - self.assertEquals('automatic', schema.properties['an_array'].items.inclusion) + self.assertEqual('automatic', schema.properties['a_string'].inclusion) + self.assertEqual('automatic', schema.properties['an_array'].items.inclusion) diff --git a/tests/test_singer.py b/tests/test_singer.py index a5b4309..3f1e76b 100644 --- a/tests/test_singer.py +++ b/tests/test_singer.py @@ -2,6 +2,7 @@ import msgspec import unittest import dateutil +import decimal class TestSinger(unittest.TestCase): @@ -152,20 +153,20 @@ def test_parse_int_zero(self): def test_parse_regular_decimal(self): value = self.create_record('3.14') - self.assertEqual(3.14, value) + self.assertEqual(decimal.Decimal('3.14'), value) def test_parse_large_decimal(self): value = self.create_record('9999999999999999.9999') - self.assertEqual(9999999999999999.9999, value) + self.assertEqual(decimal.Decimal('9999999999999999.9999'), value) def test_parse_small_decimal(self): value = self.create_record('-9999999999999999.9999') - self.assertEqual(-9999999999999999.9999, value) + self.assertEqual(decimal.Decimal('-9999999999999999.9999'), value) def test_parse_absurdly_large_decimal(self): value_str = '9' * 1024 + '.' + '9' * 1024 - with self.assertRaises(msgspec.ValidationError): - self.create_record(value_str) + value = self.create_record(value_str) + self.assertEqual(decimal.Decimal(value_str), value) def test_parse_absurdly_large_int(self): value_str = '9' * 1024 @@ -191,7 +192,7 @@ def test_parse_bulk_decs(self): ] for value_str in value_strs: value = self.create_record(value_str) - self.assertEqual(float(value_str), value) + self.assertEqual(decimal.Decimal(value_str), value) def test_format_message(self): record_message = singer.RecordMessage( From 2186be4df65a802341b95a878eeb468aacb556c8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 27 Jun 2024 19:17:05 +1200 Subject: [PATCH 11/31] Bump actions/setup-python from 4 to 5 (#21) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... 
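The decimal-preserving parse exercised by the test_singer.py expectations above comes from pointing msgspec's `float_hook` at `decimal.Decimal`. A minimal, illustrative sketch (not code from this patch series; the sample payload and variable names are invented), assuming a msgspec release that exposes `float_hook`:

```python
import decimal

import msgspec

# float_hook receives the raw numeric text of each untyped JSON float,
# so routing it through decimal.Decimal sidesteps binary-float rounding.
decoder = msgspec.json.Decoder(float_hook=decimal.Decimal)

doc = decoder.decode(b'{"value": 9999999999999999.9999, "count": 3}')
print(repr(doc["value"]))  # Decimal('9999999999999999.9999') -- precision kept
print(repr(doc["count"]))  # 3 -- plain integers still decode as int
```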
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f2d7707..0751032 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -23,7 +23,7 @@ jobs: uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -56,7 +56,7 @@ jobs: uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.8" From 3789834081d7058778d275af407b9bcf7e390d06 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 08:23:22 +1200 Subject: [PATCH 12/31] Bump irongut/CodeCoverageSummary from 1.2.0 to 1.3.0 (#22) Bumps [irongut/CodeCoverageSummary](https://github.com/irongut/codecoveragesummary) from 1.2.0 to 1.3.0. - [Release notes](https://github.com/irongut/codecoveragesummary/releases) - [Commits](https://github.com/irongut/codecoveragesummary/compare/v1.2.0...v1.3.0) --- updated-dependencies: - dependency-name: irongut/CodeCoverageSummary dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 0751032..c49ae23 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -84,7 +84,7 @@ jobs: coverage report --fail-under=75 - name: Code Coverage Summary Report - uses: irongut/CodeCoverageSummary@v1.2.0 + uses: irongut/CodeCoverageSummary@v1.3.0 with: filename: coverage.xml badge: true From dd3b60f8c129ba94bd3e229f17447b90b9512318 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 08:29:09 +1200 Subject: [PATCH 13/31] Update pytest requirement from ==7.*,>=7.1.2 to >=7,<9 (#20) Updates the requirements on [pytest](https://github.com/pytest-dev/pytest) to permit the latest version. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.1.2...8.2.2) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 1f0c124..927f84c 100644 --- a/setup.py +++ b/setup.py @@ -28,7 +28,7 @@ extras_require={ 'dev': [ 'pylint==3.0.2,==3.*', - 'pytest>=7.1.2,==7.*', + 'pytest>=7,<9', 'coverage[toml]>=6.3,<8.0', 'ipython', 'ipdb', From 554fe14fefa6616ce3f3b77f472c2c7e6c401208 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jul 2024 08:32:35 +1200 Subject: [PATCH 14/31] Update pylint requirement from ==3.*,==3.0.2 to ==3.2.5 (#23) Updates the requirements on [pylint](https://github.com/pylint-dev/pylint) to permit the latest version. 
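The RefResolver-to-referencing migration shown for singer/transform.py earlier in this series can be exercised on its own. A hedged sketch that reuses the same Registry/Resource calls as the patch; the sample schema, the `refs` mapping, and the `definitions` URI are invented for illustration:

```python
from referencing import Registry
from referencing.jsonschema import DRAFT202012

# A root schema whose $ref points into a separately supplied resource.
schema = {"type": "object", "properties": {"author": {"$ref": "definitions#/author"}}}
refs = {"definitions": {"author": {"type": "string"}}}

# Register the root schema under the empty base URI, then the extra resources,
# mirroring resolve_schema_references() in the patch above.
registry = Registry().with_resource("", DRAFT202012.create_resource(schema))
registry = registry.with_resources(
    [(uri, DRAFT202012.create_resource(contents)) for uri, contents in refs.items()]
)

resolver = registry.resolver()
resolved = resolver.lookup("definitions#/author")
print(resolved.contents)  # {'type': 'string'} -- what resolver.resolve(...)[1] returned before
```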
- [Release notes](https://github.com/pylint-dev/pylint/releases) - [Commits](https://github.com/pylint-dev/pylint/compare/v3.0.2...v3.2.5) --- updated-dependencies: - dependency-name: pylint dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 927f84c..857d32b 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ ], extras_require={ 'dev': [ - 'pylint==3.0.2,==3.*', + 'pylint==3.2.5', 'pytest>=7,<9', 'coverage[toml]>=6.3,<8.0', 'ipython', From 59efc29c6ba8a3f037887e9675f92ad93a66c4c4 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Mon, 8 Jul 2024 18:57:49 +1200 Subject: [PATCH 15/31] Feature/setup to pyproject (#24) * Moving to pyproject and poetry * Reformatting and adding tox tests * Adding testing for different python versions * Updating github actions to run tox * Tweaking pipeline to use poetry * Updating Documentation --------- Co-authored-by: Steve Clarke --- .github/workflows/main.yml | 20 +- CHANGELOG.md | 10 + CONTRIBUTING.md | 2 +- Makefile | 2 +- README.md | 36 +- poetry.lock | 1319 ++++++++++++++++++++++++++++++++++++ pyproject.toml | 60 ++ setup.py | 45 -- singer/__init__.py | 80 +-- singer/bookmarks.py | 44 +- singer/catalog.py | 97 +-- singer/logger.py | 10 +- singer/messages.py | 261 ++++--- singer/metadata.py | 48 +- singer/metrics.py | 98 +-- singer/requests.py | 6 +- singer/schema.py | 123 ++-- singer/statediff.py | 20 +- singer/transform.py | 233 ++++--- singer/utils.py | 91 +-- tests/test_bookmarks.py | 176 +++-- tests/test_catalog.py | 219 +++--- tests/test_metadata.py | 382 ++++++----- tests/test_metrics.py | 120 +++- tests/test_schema.py | 65 +- tests/test_singer.py | 237 ++++--- tests/test_statediff.py | 95 +-- tests/test_transform.py | 644 ++++++++++++------ tests/test_utils.py | 16 +- tox.ini | 33 + 30 files changed, 3298 insertions(+), 1294 deletions(-) create mode 100644 poetry.lock delete mode 100644 setup.py create mode 100644 tox.ini diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c49ae23..9db9d47 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -29,14 +29,14 @@ jobs: - name: Install and upgrade dependencies run: | - python -m pip install -U pip setuptools wheel - python3 -m pip install -U .[dev] + python -m pip install -U pip poetry + python3 -m poetry install - - name: Analysing the code with pylint - run: pylint singer + - name: Analysing the code with tox tests + run: poetry run tox -e py - name: Runs tests with coverage - run: coverage run --parallel -m pytest + run: poetry run coverage run --parallel -m pytest - name: Upload coverage data uses: actions/upload-artifact@v4 @@ -62,8 +62,8 @@ jobs: - name: Install and upgrade dependencies run: | - python -m pip install -U pip setuptools wheel - python3 -m pip install -U .[dev] + python -m pip install -U pip poetry + python3 -m poetry install - name: Download coverage data uses: actions/download-artifact@v4 @@ -73,15 +73,15 @@ jobs: - name: Combine coverage data run: | - coverage combine + poetry run coverage combine - name: Generate XML coverage report run: | - coverage xml --fail-under=75 + poetry run coverage xml --fail-under=75 - name: Display human readable report run: | - coverage report --fail-under=75 + poetry run coverage report --fail-under=75 - name: Code Coverage Summary Report uses: 
irongut/CodeCoverageSummary@v1.3.0 diff --git a/CHANGELOG.md b/CHANGELOG.md index 5a9fbad..6df03f0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## 4.0.0 (2024-07-08) + * Moving from setup.py to pyproject.toml and poetry for installation + * Introducing tox for platform independent testing of pep8 compliance. + * Introducing black for formatting + * Introducing isort for compliant and order includes + * Introducing flake8 for linting alongside pylint + * Introducing mypy for type checking + * Updating github actions to run tox tests rather than directly running pytests. + * Code formatting updates and changes to pass / met the standards above. + ## 3.0.0 (2023-08-23) * Using msgspec instead of orjson or other serializers for speed benefit * Support for Python 3.12 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1dc7443..97a004f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,5 +4,5 @@ 1. Install and use [pre-commit](https://pre-commit.com/) to keep your changes in the style of the project. 2. Write tests to cover any new code or code changes. -3. Please make sure that all tests pass and that the code passes linting with `make`. +3. Please make sure that all tests pass and that the code passes linting with `poetry run tox`. 4. Open up the PR. diff --git a/Makefile b/Makefile index 3689877..3aecb65 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ check_prereqs: bash -c '[[ -n $$VIRTUAL_ENV ]]' - bash -c '[[ $$(python3 --version) == *3.[5-7]* ]]' + bash -c '[[ $$(python3 --version) == *3.[8-12]* ]]' install: check_prereqs python3 -m pip install -e '.[dev]' diff --git a/README.md b/README.md index 63800f8..d4d1eb2 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ Next, install this library: source ~/.virtualenvs/singer-python/bin/activate git clone http://github.com/singer-io/singer-python cd singer-python -make install +pip install ``` ### Usage example @@ -65,6 +65,40 @@ information to correctly replicate decimal data without loss. For the Floats and } ``` +## Developer Resources + +### Initialize your Development Environment + +```bash +pip install poetry +poetry install +``` + +### Create and Run Tests + +Create tests within the `tests/` directory and +then run: + +```bash +poetry run pytest +``` + +or + +```bash +poetry run coverage run --parallel -m pytest +``` + +### Continuous Integration +Run through the full suite of tests and linters by running + +```bash +poetry run tox +``` + +These must pass in order for PR's to be merged. + + License ------- diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..a389ac9 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1319 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "appnope" +version = "0.1.4" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = ">=3.6" +files = [ + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, +] + +[[package]] +name = "astroid" +version = "3.2.2" +description = "An abstract syntax tree for Python with inference support." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"}, + {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +optional = false +python-versions = "*" +files = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "black" +version = "24.4.2" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] 
+colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "ciso8601" +version = "2.3.1" +description = "Fast ISO8601 date time parser for Python written in C" +optional = false +python-versions = "*" +files = [ + {file = "ciso8601-2.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:57db9a28e87f9e4fccba643fb70a9ba1515adc5e1325508eb2c10dd96620314c"}, + {file = "ciso8601-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c59646197ddbf84909b6c31d55f744cfeef51811e3910b61d0f58f2885823fd"}, + {file = "ciso8601-2.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a25da209193134842cd573464a5323f46fcc3ed781b633f15a34793ba7e1064"}, + {file = "ciso8601-2.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ae83f4e60fc7e260a4188e4ec4ac1bdd40bdb382eeda92fc266c5aa2f0a1ee"}, + {file = "ciso8601-2.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2c1ef17d1ea52a39b2dce6535583631ae4bfb65c76f0ee8c99413a6861a46c9e"}, + {file = "ciso8601-2.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3771049ba29bd1077588c0a24be1d53f7493e7cc686b2caa92f7cae129636a0e"}, + {file = "ciso8601-2.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:55381365366dacb57207cec610d26c9a6c0d237cb65a0cf67a2baaa5299f2366"}, + {file = "ciso8601-2.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9f25647803c9a5aaaed130c53bbec7ea06a4f95ba5c7016f59e444b4ef7ac39e"}, + {file = "ciso8601-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:473288cd63efe6a2cf3f4b5f90394e53095358ccb13d6128f87a2da85d0f389b"}, + {file = "ciso8601-2.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:121d27c55f4455eaa27ba3bd602beca915df9a352f235e935636a4660321070e"}, + {file = "ciso8601-2.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef44cb4dc83f37019a356c7a72692cbe17072456f4879ca6bc0339f67eee5d00"}, + {file = "ciso8601-2.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:364702e338212b6c1a8643d9399ada21560cf132f363853473560625cb4207f1"}, + {file = "ciso8601-2.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8acb45545e6a654310c6ef788aacb2d73686646c414ceacdd9f5f78a83165af5"}, + {file = "ciso8601-2.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:99addd8b113f85fac549167073f317a318cd2b5841552598ceb97b97c5708a38"}, + {file = "ciso8601-2.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f39bb5936debf21c52e5d52b89f26857c303da80c43a72883946096a6ef5e561"}, + {file = 
"ciso8601-2.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:21cf83ca945bb26ecd95364ae2c9ed0276378e5fe35ce1b64d4c6d5b33038ea3"}, + {file = "ciso8601-2.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:013410263cba46748d2de29e9894341ae41223356cde7970478c32bd0984d10c"}, + {file = "ciso8601-2.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b26935687ef1837b56997d8c61f1d789e698be58b261410e629eda9c89812141"}, + {file = "ciso8601-2.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0d980a2a88030d4d8b2434623c250866a75b4979d289eba69bec445c51ace99f"}, + {file = "ciso8601-2.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:87721de54e008fb1c4c3978553b05a9c417aa25b76ddf5702d6f7e8d9b109288"}, + {file = "ciso8601-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:9f107a4c051e7c0416824279264d94f4ed3da0fbd82bd96ec3c3293426826de4"}, + {file = "ciso8601-2.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:02ecbd7c8336c4e1c6bb725b898e29414ee92bdc0be6c72fb07036836b1ac867"}, + {file = "ciso8601-2.3.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36525b1f380f4601533f4631c69911e44efb9cb50beab1da3248b0daa32bced4"}, + {file = "ciso8601-2.3.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:874d20c6339e9096baaadfd1b9610bb8d5b373a0f2858cc06de8142b98d2129c"}, + {file = "ciso8601-2.3.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:46a3663c2cf838f0149e1cdb8e4bdc95716e03cf2d5f803a6eb755d825896ebe"}, + {file = "ciso8601-2.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e8e76825f80ce313d75bbbef1d3b8bd9e0ce31dbc157d1981e9593922c9983e7"}, + {file = "ciso8601-2.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6850889813f3135e0aa18f0aaec64249dd81d36a1b9bce60bb45182930c86663"}, + {file = "ciso8601-2.3.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c690ac24ec3407f68cdfd5e032c6cb18126ef33d6c4b3db0669b9cbb8c96bd4"}, + {file = "ciso8601-2.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:024c52d5d0670f15ca3dc53eff7345b6eaee22fba929675f6a408f9d1e159d98"}, + {file = "ciso8601-2.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7ae2c3442d042de5330672d0d28486ed92f9d7c6dc010943aa618fd361d4638"}, + {file = "ciso8601-2.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:22128f0def36fa3c4cf0c482a216e8b8ad722def08bc11c07438eff82bdcd02a"}, + {file = "ciso8601-2.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:025859ec286a994aa3f2120c0f27d053b719cabc975398338374f2cc1f961125"}, + {file = "ciso8601-2.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2a64ff58904d4418d60fa9619014ae820ae21f7aef58da46df78a4c647f951ec"}, + {file = "ciso8601-2.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d1f85c0b7fa742bbfd18177137ccbaa3f867dd06157f91595075bb959a733048"}, + {file = "ciso8601-2.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ac59453664781dfddebee51f9a36e41819993823fdb09ddc0ce0e4bd3ff0c3"}, + {file = "ciso8601-2.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:eaecca7e0c3ef9e8f5e963e212b083684e849f9a9bb25834d3042363223a73cd"}, + {file = "ciso8601-2.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ad8f417c45eea973a694599b96f40d841215bfee352cb9963383e8d66b309981"}, + {file = "ciso8601-2.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:b869396e9756a7c0696d8eb69ce1d8980bea5e25c86e5996b10d78c900a4362c"}, + 
{file = "ciso8601-2.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7eb7b5ef8714d3d1fe9f3256b7a679ad783da899a0b7503a5ace78186735f840"}, + {file = "ciso8601-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:02828107880848ff497971ebc98e6dc851ad7af8ec14a58089e0e11f3111cad6"}, + {file = "ciso8601-2.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:566b4a8b2f9717e54ffcdd732a7c8051a91da30a60a4f1dafb62e303a1dbac69"}, + {file = "ciso8601-2.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58a749d63f28c2eda71416c9d6014113b0748abf5fd14c502b01bd515502fedf"}, + {file = "ciso8601-2.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:cb135de0e3b8feb7e74a4f7a234e8c8545957fe8d26316a1a549553f425c629d"}, + {file = "ciso8601-2.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:695583810836a42945084b33621b22b0309701c6916689f6a3588fa44c5bc413"}, + {file = "ciso8601-2.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:21204d98496cf5c0511dc21533be55c2a2d34b8c65603946a116812ffbae3b2d"}, + {file = "ciso8601-2.3.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c29ea2b03dee2dc0a5d3e4a0b7d7768c597781e9fa451fe1025600f7cb55a89"}, + {file = "ciso8601-2.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7533256af90724b8b7a707dcd1be4b67989447595c8e1e1c28399d4fd51dac50"}, + {file = "ciso8601-2.3.1-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4bc9d577c0d1e57532513fc2899f5231727e28981a426767f7fa13dacb18c06"}, + {file = "ciso8601-2.3.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:4e30501eed43eea7ef64f032c81cd1d8b2020035cbdcefad40db72e2f3bc97ff"}, + {file = "ciso8601-2.3.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:070f568de3bc269268296cb9265704dc5fcb9d4c12b1f1c67536624174df5d09"}, + {file = "ciso8601-2.3.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:9065053c034c80c0afd74c71a4906675d07078a05cfd1cb5ff70661378cdbe60"}, + {file = "ciso8601-2.3.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac00d293cdb3d1a5c78e09b3d75c7b0292ab45d5b26853b436ff5087eba2165"}, + {file = "ciso8601-2.3.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:06941e2ee46701f083aeb21d13eb762d74d5ed6c46ff22119f27a42ed6edc8f9"}, + {file = "ciso8601-2.3.1.tar.gz", hash = "sha256:3212c7ffe5d8080270548b5f2692ffd2039683b6628a8d2ad456122cc5793c4c"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.5.4" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cfb5a4f556bb51aba274588200a46e4dd6b505fb1a5f8c5ae408222eb416f99"}, + {file = "coverage-7.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2174e7c23e0a454ffe12267a10732c273243b4f2d50d07544a91198f05c48f47"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2214ee920787d85db1b6a0bd9da5f8503ccc8fcd5814d90796c2f2493a2f4d2e"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1137f46adb28e3813dec8c01fefadcb8c614f33576f672962e323b5128d9a68d"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b385d49609f8e9efc885790a5a0e89f2e3ae042cdf12958b6034cc442de428d3"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4a474f799456e0eb46d78ab07303286a84a3140e9700b9e154cfebc8f527016"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5cd64adedf3be66f8ccee418473c2916492d53cbafbfcff851cbec5a8454b136"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e564c2cf45d2f44a9da56f4e3a26b2236504a496eb4cb0ca7221cd4cc7a9aca9"}, + {file = "coverage-7.5.4-cp310-cp310-win32.whl", hash = "sha256:7076b4b3a5f6d2b5d7f1185fde25b1e54eb66e647a1dfef0e2c2bfaf9b4c88c8"}, + {file = "coverage-7.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:018a12985185038a5b2bcafab04ab833a9a0f2c59995b3cec07e10074c78635f"}, + {file = "coverage-7.5.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db14f552ac38f10758ad14dd7b983dbab424e731588d300c7db25b6f89e335b5"}, + {file = "coverage-7.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3257fdd8e574805f27bb5342b77bc65578e98cbc004a92232106344053f319ba"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a6612c99081d8d6134005b1354191e103ec9705d7ba2754e848211ac8cacc6b"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d45d3cbd94159c468b9b8c5a556e3f6b81a8d1af2a92b77320e887c3e7a5d080"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed550e7442f278af76d9d65af48069f1fb84c9f745ae249c1a183c1e9d1b025c"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a892be37ca35eb5019ec85402c3371b0f7cda5ab5056023a7f13da0961e60da"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8192794d120167e2a64721d88dbd688584675e86e15d0569599257566dec9bf0"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:820bc841faa502e727a48311948e0461132a9c8baa42f6b2b84a29ced24cc078"}, + {file = "coverage-7.5.4-cp311-cp311-win32.whl", hash = 
"sha256:6aae5cce399a0f065da65c7bb1e8abd5c7a3043da9dceb429ebe1b289bc07806"}, + {file = "coverage-7.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2e344d6adc8ef81c5a233d3a57b3c7d5181f40e79e05e1c143da143ccb6377d"}, + {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"}, + {file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"}, + {file = "coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"}, + {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"}, + {file = "coverage-7.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdd31315fc20868c194130de9ee6bfd99755cc9565edff98ecc12585b90be882"}, + {file = "coverage-7.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02ff6e898197cc1e9fa375581382b72498eb2e6d5fc0b53f03e496cfee3fac6d"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05c16cf4b4c2fc880cb12ba4c9b526e9e5d5bb1d81313d4d732a5b9fe2b9d53"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5986ee7ea0795a4095ac4d113cbb3448601efca7f158ec7f7087a6c705304e4"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df54843b88901fdc2f598ac06737f03d71168fd1175728054c8f5a2739ac3e4"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ab73b35e8d109bffbda9a3e91c64e29fe26e03e49addf5b43d85fc426dde11f9"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:aea072a941b033813f5e4814541fc265a5c12ed9720daef11ca516aeacd3bd7f"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:16852febd96acd953b0d55fc842ce2dac1710f26729b31c80b940b9afcd9896f"}, + {file = "coverage-7.5.4-cp38-cp38-win32.whl", hash = "sha256:8f894208794b164e6bd4bba61fc98bf6b06be4d390cf2daacfa6eca0a6d2bb4f"}, + {file = "coverage-7.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:e2afe743289273209c992075a5a4913e8d007d569a406ffed0bd080ea02b0633"}, + {file = "coverage-7.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b95c3a8cb0463ba9f77383d0fa8c9194cf91f64445a63fc26fb2327e1e1eb088"}, + {file = 
"coverage-7.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7564cc09dd91b5a6001754a5b3c6ecc4aba6323baf33a12bd751036c998be4"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44da56a2589b684813f86d07597fdf8a9c6ce77f58976727329272f5a01f99f7"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e16f3d6b491c48c5ae726308e6ab1e18ee830b4cdd6913f2d7f77354b33f91c8"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbc5958cb471e5a5af41b0ddaea96a37e74ed289535e8deca404811f6cb0bc3d"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a04e990a2a41740b02d6182b498ee9796cf60eefe40cf859b016650147908029"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ddbd2f9713a79e8e7242d7c51f1929611e991d855f414ca9996c20e44a895f7c"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b1ccf5e728ccf83acd313c89f07c22d70d6c375a9c6f339233dcf792094bcbf7"}, + {file = "coverage-7.5.4-cp39-cp39-win32.whl", hash = "sha256:56b4eafa21c6c175b3ede004ca12c653a88b6f922494b023aeb1e836df953ace"}, + {file = "coverage-7.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:65e528e2e921ba8fd67d9055e6b9f9e34b21ebd6768ae1c1723f4ea6ace1234d"}, + {file = "coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"}, + {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "dill" +version = "0.3.8" +description = "serialize all of Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, + {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.1" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "2.0.1" +description = "Get the currently 
executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.5" +files = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + +[[package]] +name = "filelock" +version = "3.15.4" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "flake8" +version = "7.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, + {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.12.0,<2.13.0" +pyflakes = ">=3.2.0,<3.3.0" + +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipdb" +version = "0.13.13" +description = "IPython-enabled pdb" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "ipdb-0.13.13-py3-none-any.whl", hash = "sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4"}, + {file = "ipdb-0.13.13.tar.gz", hash = "sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726"}, 
+] + +[package.dependencies] +decorator = {version = "*", markers = "python_version > \"3.6\""} +ipython = {version = ">=7.31.1", markers = "python_version > \"3.6\""} +tomli = {version = "*", markers = "python_version > \"3.6\" and python_version < \"3.11\""} + +[[package]] +name = "ipython" +version = "8.12.3" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipython-8.12.3-py3-none-any.whl", hash = "sha256:b0340d46a933d27c657b211a329d0be23793c36595acf9e6ef4164bc01a1804c"}, + {file = "ipython-8.12.3.tar.gz", hash = "sha256:3910c4b54543c2ad73d06579aa771041b7d5707b033bd488669b4cf544e3b363"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} + +[package.extras] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jsonschema" +version = "4.22.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"}, + {file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +jsonschema-specifications = ">=2023.03.6" +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, +] + +[package.dependencies] +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +referencing = ">=0.31.0" + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = 
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "msgspec" +version = "0.18.6" +description = "A fast serialization and validation library, with builtin support for JSON, MessagePack, YAML, and TOML." +optional = false +python-versions = ">=3.8" +files = [ + {file = "msgspec-0.18.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77f30b0234eceeff0f651119b9821ce80949b4d667ad38f3bfed0d0ebf9d6d8f"}, + {file = "msgspec-0.18.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a76b60e501b3932782a9da039bd1cd552b7d8dec54ce38332b87136c64852dd"}, + {file = "msgspec-0.18.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06acbd6edf175bee0e36295d6b0302c6de3aaf61246b46f9549ca0041a9d7177"}, + {file = "msgspec-0.18.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40a4df891676d9c28a67c2cc39947c33de516335680d1316a89e8f7218660410"}, + {file = "msgspec-0.18.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a6896f4cd5b4b7d688018805520769a8446df911eb93b421c6c68155cdf9dd5a"}, + {file = "msgspec-0.18.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3ac4dd63fd5309dd42a8c8c36c1563531069152be7819518be0a9d03be9788e4"}, + {file = "msgspec-0.18.6-cp310-cp310-win_amd64.whl", hash = "sha256:fda4c357145cf0b760000c4ad597e19b53adf01382b711f281720a10a0fe72b7"}, + {file = "msgspec-0.18.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e77e56ffe2701e83a96e35770c6adb655ffc074d530018d1b584a8e635b4f36f"}, + {file = "msgspec-0.18.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5351afb216b743df4b6b147691523697ff3a2fc5f3d54f771e91219f5c23aaa"}, + {file = "msgspec-0.18.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3232fabacef86fe8323cecbe99abbc5c02f7698e3f5f2e248e3480b66a3596b"}, + {file = "msgspec-0.18.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3b524df6ea9998bbc99ea6ee4d0276a101bcc1aa8d14887bb823914d9f60d07"}, + {file = "msgspec-0.18.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:37f67c1d81272131895bb20d388dd8d341390acd0e192a55ab02d4d6468b434c"}, + {file = "msgspec-0.18.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d0feb7a03d971c1c0353de1a8fe30bb6579c2dc5ccf29b5f7c7ab01172010492"}, + {file = "msgspec-0.18.6-cp311-cp311-win_amd64.whl", hash = "sha256:41cf758d3f40428c235c0f27bc6f322d43063bc32da7b9643e3f805c21ed57b4"}, + {file = "msgspec-0.18.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d86f5071fe33e19500920333c11e2267a31942d18fed4d9de5bc2fbab267d28c"}, + {file = "msgspec-0.18.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce13981bfa06f5eb126a3a5a38b1976bddb49a36e4f46d8e6edecf33ccf11df1"}, + {file = "msgspec-0.18.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97dec6932ad5e3ee1e3c14718638ba333befc45e0661caa57033cd4cc489466"}, + {file = "msgspec-0.18.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad237100393f637b297926cae1868b0d500f764ccd2f0623a380e2bcfb2809ca"}, + {file = "msgspec-0.18.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db1d8626748fa5d29bbd15da58b2d73af25b10aa98abf85aab8028119188ed57"}, + {file = "msgspec-0.18.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d70cb3d00d9f4de14d0b31d38dfe60c88ae16f3182988246a9861259c6722af6"}, + {file = "msgspec-0.18.6-cp312-cp312-win_amd64.whl", hash = 
"sha256:1003c20bfe9c6114cc16ea5db9c5466e49fae3d7f5e2e59cb70693190ad34da0"}, + {file = "msgspec-0.18.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f7d9faed6dfff654a9ca7d9b0068456517f63dbc3aa704a527f493b9200b210a"}, + {file = "msgspec-0.18.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9da21f804c1a1471f26d32b5d9bc0480450ea77fbb8d9db431463ab64aaac2cf"}, + {file = "msgspec-0.18.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46eb2f6b22b0e61c137e65795b97dc515860bf6ec761d8fb65fdb62aa094ba61"}, + {file = "msgspec-0.18.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8355b55c80ac3e04885d72db515817d9fbb0def3bab936bba104e99ad22cf46"}, + {file = "msgspec-0.18.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9080eb12b8f59e177bd1eb5c21e24dd2ba2fa88a1dbc9a98e05ad7779b54c681"}, + {file = "msgspec-0.18.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc001cf39becf8d2dcd3f413a4797c55009b3a3cdbf78a8bf5a7ca8fdb76032c"}, + {file = "msgspec-0.18.6-cp38-cp38-win_amd64.whl", hash = "sha256:fac5834e14ac4da1fca373753e0c4ec9c8069d1fe5f534fa5208453b6065d5be"}, + {file = "msgspec-0.18.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:974d3520fcc6b824a6dedbdf2b411df31a73e6e7414301abac62e6b8d03791b4"}, + {file = "msgspec-0.18.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fd62e5818731a66aaa8e9b0a1e5543dc979a46278da01e85c3c9a1a4f047ef7e"}, + {file = "msgspec-0.18.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7481355a1adcf1f08dedd9311193c674ffb8bf7b79314b4314752b89a2cf7f1c"}, + {file = "msgspec-0.18.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6aa85198f8f154cf35d6f979998f6dadd3dc46a8a8c714632f53f5d65b315c07"}, + {file = "msgspec-0.18.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e24539b25c85c8f0597274f11061c102ad6b0c56af053373ba4629772b407be"}, + {file = "msgspec-0.18.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c61ee4d3be03ea9cd089f7c8e36158786cd06e51fbb62529276452bbf2d52ece"}, + {file = "msgspec-0.18.6-cp39-cp39-win_amd64.whl", hash = "sha256:b5c390b0b0b7da879520d4ae26044d74aeee5144f83087eb7842ba59c02bc090"}, + {file = "msgspec-0.18.6.tar.gz", hash = "sha256:a59fc3b4fcdb972d09138cb516dbde600c99d07c38fd9372a6ef500d2d031b4e"}, +] + +[package.extras] +dev = ["attrs", "coverage", "furo", "gcovr", "ipython", "msgpack", "mypy", "pre-commit", "pyright", "pytest", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "tomli", "tomli-w"] +doc = ["furo", "ipython", "sphinx", "sphinx-copybutton", "sphinx-design"] +test = ["attrs", "msgpack", "mypy", "pyright", "pytest", "pyyaml", "tomli", "tomli-w"] +toml = ["tomli", "tomli-w"] +yaml = ["pyyaml"] + +[[package]] +name = "mypy" +version = "1.10.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, + {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, + {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, + {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, + {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, + {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, + {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, + {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, + {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, + {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, + {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, + {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, + {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, + {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, + {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, + {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, + {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, + {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = 
"1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "parso" +version = "0.8.4" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." +optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +optional = false +python-versions = "*" +files = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] + +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +optional = false +python-versions = ">=3.6" +files = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.47" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pycodestyle" +version = "2.12.0" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, + {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, +] + +[[package]] +name = "pydocstyle" +version = "6.3.0" +description = "Python docstring style checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, +] + +[package.dependencies] +snowballstemmer = ">=2.2.0" + +[package.extras] +toml = ["tomli (>=1.2.3)"] + +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = 
">=3.8" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pylint" +version = "3.2.5" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.2.5-py3-none-any.whl", hash = "sha256:32cd6c042b5004b8e857d727708720c54a676d1e22917cf1a2df9b4d4868abd6"}, + {file = "pylint-3.2.5.tar.gz", hash = "sha256:e9b7171e242dcc6ebd0aaa7540481d1a72860748a0a7816b8fe6cf6c80a6fe7e"}, +] + +[package.dependencies] +astroid = ">=3.2.2,<=3.3.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + +[[package]] +name = "pyproject-api" +version = "1.7.1" +description = "API to interact with the python pyproject.toml based projects" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyproject_api-1.7.1-py3-none-any.whl", hash = "sha256:2dc1654062c2b27733d8fd4cdda672b22fe8741ef1dde8e3a998a9547b071eeb"}, + {file = "pyproject_api-1.7.1.tar.gz", hash = "sha256:7ebc6cd10710f89f4cf2a2731710a98abce37ebff19427116ff2174c9236a827"}, +] + +[package.dependencies] +packaging = ">=24.1" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["furo (>=2024.5.6)", "sphinx-autodoc-typehints (>=2.2.1)"] +testing = ["covdefaults (>=2.3)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=70.1)"] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = 
"python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2018.9" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2018.9-py2.py3-none-any.whl", hash = "sha256:32b0891edff07e28efe91284ed9c31e123d84bea3fd98e1f72be2508f43ef8d9"}, + {file = "pytz-2018.9.tar.gz", hash = "sha256:d5f05e487007e29e03409f9398d074e158d920d36eb82eaf66fb1136b0c5374c"}, +] + +[[package]] +name = "referencing" +version = "0.35.1" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, + {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "rpds-py" +version = "0.18.1" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"}, + {file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d"}, + {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60"}, + {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da"}, + {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1"}, + {file = "rpds_py-0.18.1-cp310-none-win32.whl", hash = "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333"}, + {file = "rpds_py-0.18.1-cp310-none-win_amd64.whl", hash = 
"sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a"}, + {file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"}, + {file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8"}, + {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7"}, + {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e"}, + {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88"}, + {file = "rpds_py-0.18.1-cp311-none-win32.whl", hash = "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb"}, + {file = "rpds_py-0.18.1-cp311-none-win_amd64.whl", hash = "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2"}, + {file = "rpds_py-0.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3"}, + {file = "rpds_py-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac"}, + {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c"}, + 
{file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac"}, + {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a"}, + {file = "rpds_py-0.18.1-cp312-none-win32.whl", hash = "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6"}, + {file = "rpds_py-0.18.1-cp312-none-win_amd64.whl", hash = "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72"}, + {file = "rpds_py-0.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74"}, + {file = "rpds_py-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0"}, + {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d"}, + {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e"}, + {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc"}, + {file = "rpds_py-0.18.1-cp38-none-win32.whl", hash = "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9"}, + {file = "rpds_py-0.18.1-cp38-none-win_amd64.whl", hash = "sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2"}, + {file = "rpds_py-0.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93"}, + {file = "rpds_py-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3"}, + {file = 
"rpds_py-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c"}, + {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338"}, + {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b"}, + {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26"}, + {file = "rpds_py-0.18.1-cp39-none-win32.whl", hash = "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360"}, + {file = "rpds_py-0.18.1-cp39-none-win_amd64.whl", hash = "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644"}, + {file = 
"rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e"}, + {file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snowballstemmer" +version = 
"2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tomlkit" +version = "0.12.5" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, + {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, +] + +[[package]] +name = "tox" +version = "4.15.1" +description = "tox is a generic virtualenv management and test command line tool" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tox-4.15.1-py3-none-any.whl", hash = "sha256:f00a5dc4222b358e69694e47e3da0227ac41253509bca9f45aa8f012053e8d9d"}, + {file = "tox-4.15.1.tar.gz", hash = "sha256:53a092527d65e873e39213ebd4bd027a64623320b6b0326136384213f95b7076"}, +] + +[package.dependencies] +cachetools = ">=5.3.2" +chardet = ">=5.2" +colorama = ">=0.4.6" +filelock = ">=3.13.1" +packaging = ">=23.2" +platformdirs = ">=4.1" +pluggy = ">=1.3" +pyproject-api = ">=1.6.1" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} +virtualenv = ">=20.25" + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.25.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] +testing = ["build[virtualenv] (>=1.0.3)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=8.0.2)", "distlib (>=0.3.8)", "flaky (>=3.7)", "hatch-vcs (>=0.4)", "hatchling (>=1.21)", "psutil (>=5.9.7)", "pytest (>=7.4.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-xdist (>=3.5)", "re-assert (>=1.1)", "time-machine (>=2.13)", "wheel (>=0.42)"] + +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + 
{file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20240316" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, + {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, +] + +[[package]] +name = "types-pytz" +version = "2024.1.0.20240417" +description = "Typing stubs for pytz" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-pytz-2024.1.0.20240417.tar.gz", hash = "sha256:6810c8a1f68f21fdf0f4f374a432487c77645a0ac0b31de4bf4690cf21ad3981"}, + {file = "types_pytz-2024.1.0.20240417-py3-none-any.whl", hash = "sha256:8335d443310e2db7b74e007414e74c4f53b67452c0cb0d228ca359ccfba59659"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "unify" +version = "0.5" +description = "Modifies strings to all use the same (single/double) quote where possible." +optional = false +python-versions = "*" +files = [ + {file = "unify-0.5.tar.gz", hash = "sha256:8ddce812b2457212b7598fe574c9e6eb3ad69710f445391338270c7f8a71723c"}, +] + +[package.dependencies] +untokenize = "*" + +[[package]] +name = "untokenize" +version = "0.1.1" +description = "Transforms tokens into original source code (while preserving whitespace)." 
+optional = false +python-versions = "*" +files = [ + {file = "untokenize-0.1.1.tar.gz", hash = "sha256:3865dbbbb8efb4bb5eaa72f1be7f3e0be00ea8b7f125c69cbd1f5fda926f37a2"}, +] + +[[package]] +name = "virtualenv" +version = "20.26.3" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "zipp" +version = "3.19.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, +] + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.8" +content-hash = "327bb1f967c37221f4326095ee6323715036ecda14e68d3cccb39833f1eee81c" diff --git a/pyproject.toml b/pyproject.toml index 96df883..f81c256 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,56 @@ +[tool.poetry] +name = "pipelinewise-singer-python" +version = "4.0.0" +description = "Singer.io utility library - PipelineWise compatible" +authors = [] +license = "Apache 2.0" +readme = "README.md" +homepage = "https://github.com/s7clarke10/pipelinewise-singer-python" +repository = "https://github.com/s7clarke10/pipelinewise-singer-python" +keywords = ["singer", "meltano", "pipelinewise", "framework"] +classifiers = [ + "License :: OSI Approved :: Apache Software License", + # Specify the Python versions you support here. 
+ "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Development Status :: 5 - Production/Stable", "Topic :: Singer" +] +packages = [ + { include = "singer" } +] +include = [ + { path = "singer/logging.conf", format = ["sdist", "wheel"] } +] + + +[tool.poetry.dependencies] +python = "^3.8" +pytz = "^2018.4" +jsonschema = "^4.19.2" +msgspec = "^0.18.0" +python-dateutil = "^2.7.3" +backoff = "2.2.1" +ciso8601 = "^2.3.1" + +[tool.poetry.dev-dependencies] +types-pytz = "^2024.1.0.20240417" +types-python-dateutil = "^2.9.0.20240316" +pylint = "3.2.5" +pytest = "7.*, <9.*" +coverage = ">= 6.3, < 8.0" +ipython = "^8.12.1" +ipdb = "^0.13.13" +unify = "^0.5" +tox = "^4.15.1" +flake8 = { version = "^7.1.0", python = ">=3.8.1" } +black = "^24.4.2" +pydocstyle = "^6.3.0" +mypy = "^1.10.1" +isort = "^5.13.2" + [tool.black] line-length = 120 @@ -24,3 +77,10 @@ add_imports = [ [tool.pytest.ini_options] addopts = "-v --doctest-modules" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry.scripts] +pipelinewise-singer-python = "pipelinewise_singer_python.__init__:main" \ No newline at end of file diff --git a/setup.py b/setup.py deleted file mode 100644 index 857d32b..0000000 --- a/setup.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python - -from setuptools import setup - -with open("README.md", "r") as fh: - long_description = fh.read() - -setup(name="pipelinewise-singer-python", - version='3.0.0', - description="Singer.io utility library - PipelineWise compatible", - python_requires=">=3.8.0, <3.13", - long_description=long_description, - long_description_content_type="text/markdown", - author="TransferWise", - classifiers=[ - 'License :: OSI Approved :: Apache Software License', - 'Programming Language :: Python :: 3 :: Only' - ], - url="https://github.com/transferwise/pipelinewise-singer-python", - install_requires=[ - 'pytz>=2018.4', - 'jsonschema>=4.19.2,==4.*', - 'msgspec>=0.18.0', - 'python-dateutil>2.7.3,==2.*', - 'backoff>=2.2.1,==2.*', - 'ciso8601>=2.3.1,==2.*', - ], - extras_require={ - 'dev': [ - 'pylint==3.2.5', - 'pytest>=7,<9', - 'coverage[toml]>=6.3,<8.0', - 'ipython', - 'ipdb', - 'unify==0.5' - ] - }, - packages=['singer'], - package_data={ - 'singer': [ - 'logging.conf' - ] - }, - include_package_data=True - ) diff --git a/singer/__init__.py b/singer/__init__.py index 5b72276..bfa3322 100644 --- a/singer/__init__.py +++ b/singer/__init__.py @@ -1,71 +1,65 @@ +from __future__ import annotations + from singer import utils -from singer.utils import ( - chunk, - load_json, - parse_args, - ratelimit, - strftime, - strptime, - update_state, - should_sync_field, +from singer.bookmarks import ( + clear_bookmark, + clear_offset, + get_bookmark, + get_currently_syncing, + get_offset, + reset_stream, + set_currently_syncing, + set_offset, + write_bookmark, ) - +from singer.catalog import Catalog, CatalogEntry from singer.logger import get_logger - -from singer.metrics import ( - Counter, - Timer, - http_request_timer, - job_timer, - record_counter, -) - from singer.messages import ( ActivateVersionMessage, + BatchMessage, Message, RecordMessage, SchemaMessage, StateMessage, - BatchMessage, format_message, parse_message, + write_batch, write_message, write_record, write_records, write_schema, write_state, write_version, - write_batch ) - +from singer.metrics import ( + 
Counter, + Timer, + http_request_timer, + job_timer, + record_counter, +) +from singer.schema import Schema from singer.transform import ( NO_INTEGER_DATETIME_PARSING, - UNIX_SECONDS_INTEGER_DATETIME_PARSING, UNIX_MILLISECONDS_INTEGER_DATETIME_PARSING, + UNIX_SECONDS_INTEGER_DATETIME_PARSING, Transformer, - transform, _transform_datetime, - resolve_schema_references -) - -from singer.catalog import ( - Catalog, - CatalogEntry + resolve_schema_references, + transform, ) -from singer.schema import Schema - -from singer.bookmarks import ( - write_bookmark, - get_bookmark, - clear_bookmark, - reset_stream, - set_offset, - clear_offset, - get_offset, - set_currently_syncing, - get_currently_syncing, +from singer.utils import ( + chunk, + load_json, + parse_args, + ratelimit, + should_sync_field, + strftime, + strptime, + update_state, ) -if __name__ == '__main__': +if __name__ == "__main__": import doctest + doctest.testmod() diff --git a/singer/bookmarks.py b/singer/bookmarks.py index 53b54ca..0419a6e 100644 --- a/singer/bookmarks.py +++ b/singer/bookmarks.py @@ -1,3 +1,6 @@ +from __future__ import annotations + + def ensure_bookmark_path(state, path): submap = state for path_component in path: @@ -7,40 +10,53 @@ def ensure_bookmark_path(state, path): submap = submap[path_component] return state + def write_bookmark(state, tap_stream_id, key, val): - state = ensure_bookmark_path(state, ['bookmarks', tap_stream_id]) - state['bookmarks'][tap_stream_id][key] = val + state = ensure_bookmark_path(state, ["bookmarks", tap_stream_id]) + state["bookmarks"][tap_stream_id][key] = val return state + def clear_bookmark(state, tap_stream_id, key): - state = ensure_bookmark_path(state, ['bookmarks', tap_stream_id]) - state['bookmarks'][tap_stream_id].pop(key, None) + state = ensure_bookmark_path(state, ["bookmarks", tap_stream_id]) + state["bookmarks"][tap_stream_id].pop(key, None) return state + def reset_stream(state, tap_stream_id): - state = ensure_bookmark_path(state, ['bookmarks', tap_stream_id]) - state['bookmarks'][tap_stream_id] = {} + state = ensure_bookmark_path(state, ["bookmarks", tap_stream_id]) + state["bookmarks"][tap_stream_id] = {} return state + def get_bookmark(state, tap_stream_id, key, default=None): - return state.get('bookmarks', {}).get(tap_stream_id, {}).get(key, default) + return state.get("bookmarks", {}).get(tap_stream_id, {}).get(key, default) + def set_offset(state, tap_stream_id, offset_key, offset_value): - state = ensure_bookmark_path(state, ['bookmarks', tap_stream_id, 'offset', offset_key]) - state['bookmarks'][tap_stream_id]['offset'][offset_key] = offset_value + state = ensure_bookmark_path( + state, ["bookmarks", tap_stream_id, "offset", offset_key] + ) + state["bookmarks"][tap_stream_id]["offset"][offset_key] = offset_value return state + def clear_offset(state, tap_stream_id): - state = ensure_bookmark_path(state, ['bookmarks', tap_stream_id, 'offset']) - state['bookmarks'][tap_stream_id]['offset'] = {} + state = ensure_bookmark_path(state, ["bookmarks", tap_stream_id, "offset"]) + state["bookmarks"][tap_stream_id]["offset"] = {} return state + def get_offset(state, tap_stream_id, default=None): - return state.get('bookmarks', {}).get(tap_stream_id, {}).get('offset', default) + return ( + state.get("bookmarks", {}).get(tap_stream_id, {}).get("offset", default) + ) + def set_currently_syncing(state, tap_stream_id): - state['currently_syncing'] = tap_stream_id + state["currently_syncing"] = tap_stream_id return state + def get_currently_syncing(state, default=None): - 
return state.get('currently_syncing', default) + return state.get("currently_syncing", default) diff --git a/singer/catalog.py b/singer/catalog.py index afaa6ad..3d6fbb3 100644 --- a/singer/catalog.py +++ b/singer/catalog.py @@ -1,7 +1,11 @@ -'''Provides an object model for a Singer Catalog.''' -import msgspec +"""Provides an object model for a Singer Catalog.""" + +from __future__ import annotations + import sys +import msgspec + from . import metadata as metadata_module from .bookmarks import get_currently_syncing from .logger import get_logger @@ -13,19 +17,31 @@ def write_catalog(catalog): # If the catalog has no streams, log a warning if not catalog.streams: - LOGGER.warning('Catalog being written with no streams.') + LOGGER.warning("Catalog being written with no streams.") catalog_json = msgspec.json.format(msgspec.json.encode(catalog.to_dict())) sys.stdout.buffer.write(catalog_json) sys.stdout.buffer.flush() -# pylint: disable=too-many-instance-attributes -class CatalogEntry(): - def __init__(self, tap_stream_id=None, stream=None, - key_properties=None, schema=None, replication_key=None, - is_view=None, database=None, table=None, row_count=None, - stream_alias=None, metadata=None, replication_method=None): +# pylint: disable=too-many-instance-attributes +class CatalogEntry: + + def __init__( + self, + tap_stream_id=None, + stream=None, + key_properties=None, + schema=None, + replication_key=None, + is_view=None, + database=None, + table=None, + row_count=None, + stream_alias=None, + metadata=None, + replication_method=None, + ): self.tap_stream_id = tap_stream_id self.stream = stream @@ -49,39 +65,41 @@ def __eq__(self, other): def is_selected(self): mdata = metadata_module.to_map(self.metadata) # pylint: disable=no-member - return self.schema.selected or metadata_module.get(mdata, (), 'selected') + return self.schema.selected or metadata_module.get( + mdata, (), "selected" + ) def to_dict(self): result = {} if self.tap_stream_id: - result['tap_stream_id'] = self.tap_stream_id + result["tap_stream_id"] = self.tap_stream_id if self.database: - result['database_name'] = self.database + result["database_name"] = self.database if self.table: - result['table_name'] = self.table + result["table_name"] = self.table if self.replication_key is not None: - result['replication_key'] = self.replication_key + result["replication_key"] = self.replication_key if self.replication_method is not None: - result['replication_method'] = self.replication_method + result["replication_method"] = self.replication_method if self.key_properties is not None: - result['key_properties'] = self.key_properties + result["key_properties"] = self.key_properties if self.schema is not None: schema = self.schema.to_dict() # pylint: disable=no-member - result['schema'] = schema + result["schema"] = schema if self.is_view is not None: - result['is_view'] = self.is_view + result["is_view"] = self.is_view if self.stream is not None: - result['stream'] = self.stream + result["stream"] = self.stream if self.row_count is not None: - result['row_count'] = self.row_count + result["row_count"] = self.row_count if self.stream_alias is not None: - result['stream_alias'] = self.stream_alias + result["stream_alias"] = self.stream_alias if self.metadata is not None: - result['metadata'] = self.metadata + result["metadata"] = self.metadata return result -class Catalog(): +class Catalog: def __init__(self, streams): self.streams = streams @@ -94,7 +112,9 @@ def __eq__(self, other): @classmethod def load(cls, filename): - with 
open(filename, encoding='utf-8') as fp: # pylint: disable=invalid-name + with open( + filename, encoding="utf-8" + ) as fp: # pylint: disable=invalid-name return Catalog.from_dict(msgspec.json.decode(fp.read())) @classmethod @@ -105,24 +125,24 @@ def from_dict(cls, data): # change, since callers typically access the streams property # directly. streams = [] - for stream in data['streams']: + for stream in data["streams"]: entry = CatalogEntry() - entry.tap_stream_id = stream.get('tap_stream_id') - entry.stream = stream.get('stream') - entry.replication_key = stream.get('replication_key') - entry.key_properties = stream.get('key_properties') - entry.database = stream.get('database_name') - entry.table = stream.get('table_name') - entry.schema = Schema.from_dict(stream.get('schema')) - entry.is_view = stream.get('is_view') - entry.stream_alias = stream.get('stream_alias') - entry.metadata = stream.get('metadata') - entry.replication_method = stream.get('replication_method') + entry.tap_stream_id = stream.get("tap_stream_id") + entry.stream = stream.get("stream") + entry.replication_key = stream.get("replication_key") + entry.key_properties = stream.get("key_properties") + entry.database = stream.get("database_name") + entry.table = stream.get("table_name") + entry.schema = Schema.from_dict(stream.get("schema")) + entry.is_view = stream.get("is_view") + entry.stream_alias = stream.get("stream_alias") + entry.metadata = stream.get("metadata") + entry.replication_method = stream.get("replication_method") streams.append(entry) return Catalog(streams) def to_dict(self): - return {'streams': [stream.to_dict() for stream in self.streams]} + return {"streams": [stream.to_dict() for stream in self.streams]} def dump(self): write_catalog(self) @@ -148,11 +168,10 @@ def _shuffle_streams(self, state): bottom_half = self.streams[:matching_index] return top_half + bottom_half - def get_selected_streams(self, state): for stream in self._shuffle_streams(state): if not stream.is_selected(): - LOGGER.info('Skipping stream: %s', stream.tap_stream_id) + LOGGER.info("Skipping stream: %s", stream.tap_stream_id) continue yield stream diff --git a/singer/logger.py b/singer/logger.py index 2453eb9..d97305c 100644 --- a/singer/logger.py +++ b/singer/logger.py @@ -1,18 +1,20 @@ +from __future__ import annotations + import logging import logging.config import os -def get_logger(name='singer'): +def get_logger(name="singer"): """Return a Logger instance to use in singer.""" # Use custom logging config provided by environment variable - if 'LOGGING_CONF_FILE' in os.environ and os.environ['LOGGING_CONF_FILE']: - path = os.environ['LOGGING_CONF_FILE'] + if "LOGGING_CONF_FILE" in os.environ and os.environ["LOGGING_CONF_FILE"]: + path = os.environ["LOGGING_CONF_FILE"] logging.config.fileConfig(path, disable_existing_loggers=False) # Use the default logging conf that meets the singer specs criteria else: this_dir, _ = os.path.split(__file__) - path = os.path.join(this_dir, 'logging.conf') + path = os.path.join(this_dir, "logging.conf") logging.config.fileConfig(path, disable_existing_loggers=False) return logging.getLogger(name) diff --git a/singer/messages.py b/singer/messages.py index 0be534c..3bb0110 100644 --- a/singer/messages.py +++ b/singer/messages.py @@ -1,12 +1,16 @@ +from __future__ import annotations + +import decimal import sys -import pytz -import msgspec import ciso8601 -import decimal +import msgspec +import pytz import singer.utils as u + from .logger import get_logger + LOGGER = get_logger() # A Global 
variable to hold the msgspec encoder. @@ -17,26 +21,27 @@ # https://jcristharif.com/msgspec/perf-tips.html msg_buffer = bytearray() -class Message(): - '''Base class for messages.''' + +class Message: + """Base class for messages.""" def asdict(self): # pylint: disable=no-self-use - raise Exception('Not implemented') + raise Exception("Not implemented") def __eq__(self, other): return isinstance(other, Message) and self.asdict() == other.asdict() def __repr__(self): - pairs = [f'{k}={v}' for k, v in self.asdict().items()] - attrstr = ', '.join(pairs) - return f'{self.__class__.__name__}({attrstr})' + pairs = [f"{k}={v}" for k, v in self.asdict().items()] + attrstr = ", ".join(pairs) + return f"{self.__class__.__name__}({attrstr})" def __str__(self): return str(self.asdict()) class RecordMessage(Message): - '''RECORD message. + """RECORD message. The RECORD message has these fields: @@ -50,7 +55,7 @@ class RecordMessage(Message): stream='users', record={'id': 1, 'name': 'Mary'}) - ''' + """ def __init__(self, stream, record, version=None, time_extracted=None): self.stream = stream @@ -58,20 +63,22 @@ def __init__(self, stream, record, version=None, time_extracted=None): self.version = version self.time_extracted = time_extracted if time_extracted and not time_extracted.tzinfo: - raise ValueError("'time_extracted' must be either None " + - 'or an aware datetime (with a time zone)') + raise ValueError( + "'time_extracted' must be either None " + + "or an aware datetime (with a time zone)" + ) def asdict(self): result = { - 'type': 'RECORD', - 'stream': self.stream, - 'record': self.record, + "type": "RECORD", + "stream": self.stream, + "record": self.record, } if self.version is not None: - result['version'] = self.version + result["version"] = self.version if self.time_extracted: as_utc = self.time_extracted.astimezone(pytz.utc) - result['time_extracted'] = u.strftime(as_utc) + result["time_extracted"] = u.strftime(as_utc) return result def __str__(self): @@ -79,7 +86,7 @@ def __str__(self): class SchemaMessage(Message): - '''SCHEMA message. + """SCHEMA message. The SCHEMA message has these fields: @@ -97,8 +104,11 @@ class SchemaMessage(Message): }, key_properties=['id']) - ''' - def __init__(self, stream, schema, key_properties, bookmark_properties=None): + """ + + def __init__( + self, stream, schema, key_properties, bookmark_properties=None + ): self.stream = stream self.schema = schema self.key_properties = key_properties @@ -106,24 +116,26 @@ def __init__(self, stream, schema, key_properties, bookmark_properties=None): if isinstance(bookmark_properties, (str, bytes)): bookmark_properties = [bookmark_properties] if bookmark_properties and not isinstance(bookmark_properties, list): - raise Exception('bookmark_properties must be a string or list of strings') + raise Exception( + "bookmark_properties must be a string or list of strings" + ) self.bookmark_properties = bookmark_properties def asdict(self): result = { - 'type': 'SCHEMA', - 'stream': self.stream, - 'schema': self.schema, - 'key_properties': self.key_properties + "type": "SCHEMA", + "stream": self.stream, + "schema": self.schema, + "key_properties": self.key_properties, } if self.bookmark_properties: - result['bookmark_properties'] = self.bookmark_properties + result["bookmark_properties"] = self.bookmark_properties return result class StateMessage(Message): - '''STATE message. + """STATE message. 
The STATE message has one field: @@ -132,19 +144,17 @@ class StateMessage(Message): msg = singer.StateMessage( value={'users': '2017-06-19T00:00:00'}) - ''' + """ + def __init__(self, value): self.value = value def asdict(self): - return { - 'type': 'STATE', - 'value': self.value - } + return {"type": "STATE", "value": self.value} class ActivateVersionMessage(Message): - '''ACTIVATE_VERSION message (EXPERIMENTAL). + """ACTIVATE_VERSION message (EXPERIMENTAL). The ACTIVATE_VERSION messages has these fields: @@ -163,37 +173,40 @@ class ActivateVersionMessage(Message): stream='users', version=2) - ''' + """ + def __init__(self, stream, version): self.stream = stream self.version = version def asdict(self): return { - 'type': 'ACTIVATE_VERSION', - 'stream': self.stream, - 'version': self.version + "type": "ACTIVATE_VERSION", + "stream": self.stream, + "version": self.version, } class BatchMessage(Message): - """ BATCH message (EXPERIMENTAL). + """BATCH message (EXPERIMENTAL). The BATCH message has these fields: * stream (string) - The name of the stream. * filepath (string) - The location of a batch file. e.g. '/tmp/users001.jsonl'. * format (string, optional) - An indication of serialization format. - If none is provided, 'jsonl' will be assumed. e.g. 'csv'. - * compression (string, optional) - An indication of file compression format. e.g. 'gzip'. + If none is provided, 'jsonl' will be assumed. e.g. 'csv'. + * compression (string, optional) - An indication of file compression format. + e.g. 'gzip'. * batch_size (int, optional) - Number of records in this batch. e.g. 100000. - * time_extracted (datetime, optional) - TZ-aware datetime with batch extraction time. + * time_extracted (datetime, optional) - TZ-aware datetime with batch + extraction time. If file_properties are not provided, uncompressed jsonl files are assumed. - A BATCH record points to a collection of messages (from a single stream) serialized to disk, - and is implemented for performance reasons. Most Taps and Targets should not need to use - BATCH messages at all. + A BATCH record points to a collection of messages (from a single stream) + serialized to disk, and is implemented for performance reasons. Most Taps + and Targets should not need to use BATCH messages at all. 
msg = singer.BatchMessage( stream='users', @@ -203,33 +216,40 @@ class BatchMessage(Message): """ def __init__( - self, stream, filepath, file_format=None, compression=None, - batch_size=None, time_extracted=None + self, + stream, + filepath, + file_format=None, + compression=None, + batch_size=None, + time_extracted=None, ): self.stream = stream self.filepath = filepath - self.format = file_format or 'jsonl' + self.format = file_format or "jsonl" self.compression = compression self.batch_size = batch_size self.time_extracted = time_extracted if time_extracted and not time_extracted.tzinfo: - raise ValueError("'time_extracted' must be either None " + - 'or an aware datetime (with a time zone)') + raise ValueError( + "'time_extracted' must be either None " + + "or an aware datetime (with a time zone)" + ) def asdict(self): result = { - 'type': 'BATCH', - 'stream': self.stream, - 'filepath': self.filepath, - 'format': self.format + "type": "BATCH", + "stream": self.stream, + "filepath": self.filepath, + "format": self.format, } if self.compression is not None: - result['compression'] = self.compression + result["compression"] = self.compression if self.batch_size is not None: - result['batch_size'] = self.batch_size + result["batch_size"] = self.batch_size if self.time_extracted: as_utc = self.time_extracted.astimezone(pytz.utc) - result['time_extracted'] = u.strftime(as_utc) + result["time_extracted"] = u.strftime(as_utc) return result @@ -250,53 +270,62 @@ def parse_message(msg): # leave conversion as is for now. dec = msgspec.json.Decoder(float_hook=decimal.Decimal) obj = dec.decode(msg) - msg_type = _required_key(obj, 'type') + msg_type = _required_key(obj, "type") - if msg_type == 'RECORD': - time_extracted = obj.get('time_extracted') + if msg_type == "RECORD": + time_extracted = obj.get("time_extracted") if time_extracted: try: time_extracted = ciso8601.parse_datetime(time_extracted) except Exception: - LOGGER.warning('unable to parse time_extracted with ciso8601 library') + LOGGER.warning( + "unable to parse time_extracted with ciso8601 library" + ) time_extracted = None - # time_extracted = dateutil.parser.parse(time_extracted) - return RecordMessage(stream=_required_key(obj, 'stream'), - record=_required_key(obj, 'record'), - version=obj.get('version'), - time_extracted=time_extracted) - - if msg_type == 'SCHEMA': - return SchemaMessage(stream=_required_key(obj, 'stream'), - schema=_required_key(obj, 'schema'), - key_properties=_required_key(obj, 'key_properties'), - bookmark_properties=obj.get('bookmark_properties')) - - if msg_type == 'STATE': - return StateMessage(value=_required_key(obj, 'value')) - - if msg_type == 'ACTIVATE_VERSION': - return ActivateVersionMessage(stream=_required_key(obj, 'stream'), - version=_required_key(obj, 'version')) - - if msg_type == 'BATCH': - time_extracted = obj.get('time_extracted') + return RecordMessage( + stream=_required_key(obj, "stream"), + record=_required_key(obj, "record"), + version=obj.get("version"), + time_extracted=time_extracted, + ) + + if msg_type == "SCHEMA": + return SchemaMessage( + stream=_required_key(obj, "stream"), + schema=_required_key(obj, "schema"), + key_properties=_required_key(obj, "key_properties"), + bookmark_properties=obj.get("bookmark_properties"), + ) + + if msg_type == "STATE": + return StateMessage(value=_required_key(obj, "value")) + + if msg_type == "ACTIVATE_VERSION": + return ActivateVersionMessage( + stream=_required_key(obj, "stream"), + version=_required_key(obj, "version"), + ) + + if msg_type == 
"BATCH": + time_extracted = obj.get("time_extracted") if time_extracted: try: time_extracted = ciso8601.parse_datetime(time_extracted) except Exception: - LOGGER.warning('Unable to parse time_extracted with ciso8601 library') + LOGGER.warning( + "Unable to parse time_extracted with ciso8601 library" + ) time_extracted = None return BatchMessage( - stream=_required_key(obj, 'stream'), - filepath=_required_key(obj, 'filepath'), - file_format=_required_key(obj, 'format'), - compression=obj.get('compression'), - batch_size=obj.get('batch_size'), - time_extracted=time_extracted + stream=_required_key(obj, "stream"), + filepath=_required_key(obj, "filepath"), + file_format=_required_key(obj, "format"), + compression=obj.get("compression"), + batch_size=obj.get("batch_size"), + time_extracted=time_extracted, ) return None @@ -319,14 +348,14 @@ def format_message(message, option=0): if not ENCODER: set_msgspec_encoder() - if option==0: + if option == 0: return ENCODER.encode(message.asdict()) - if option==1: + if option == 1: ENCODER.encode_into(message.asdict(), msg_buffer) msg_buffer.extend(b"\n") return msg_buffer - raise Exception('Not implemented: 0=Standard, 1=Message with newline') + raise Exception("Not implemented: 0=Standard, 1=Message with newline") def set_msgspec_encoder(): @@ -350,7 +379,7 @@ def set_msgspec_encoder(): if use_singer_decimal: ENCODER = msgspec.json.Encoder() LOGGER.info( - 'Singer Decimal Enabled! Floats and Decimals will be output as strings' + "Singer Decimal Enabled! Floats and Decimals will be output as strings" ) else: ENCODER = msgspec.json.Encoder(decimal_format="number") @@ -376,9 +405,13 @@ def write_record(stream_name, record, stream_alias=None, time_extracted=None): write_record("users", {"id": 2, "email": "mike@stitchdata.com"}) """ - write_message(RecordMessage(stream=(stream_alias or stream_name), - record=record, - time_extracted=time_extracted)) + write_message( + RecordMessage( + stream=(stream_alias or stream_name), + record=record, + time_extracted=time_extracted, + ) + ) def write_records(stream_name, records): @@ -392,25 +425,42 @@ def write_records(stream_name, records): write_record(stream_name, record) -def write_schema(stream_name, schema, key_properties, bookmark_properties=None, stream_alias=None): +def write_schema( + stream_name, + schema, + key_properties, + bookmark_properties=None, + stream_alias=None, +): """Write a schema message. 
stream = 'test' - schema = {'properties': {'id': {'type': 'integer'}, 'email': {'type': 'string'}}} # nopep8 + schema = { + "properties": { + "id": { + "type": "integer" + }, + "email": { + "type": "string" + } + } + } key_properties = ['id'] write_schema(stream, schema, key_properties) """ if isinstance(key_properties, (str, bytes)): key_properties = [key_properties] if not isinstance(key_properties, list): - raise Exception('key_properties must be a string or list of strings') + raise Exception("key_properties must be a string or list of strings") write_message( SchemaMessage( stream=(stream_alias or stream_name), schema=schema, key_properties=key_properties, - bookmark_properties=bookmark_properties)) + bookmark_properties=bookmark_properties, + ) + ) def write_state(value): @@ -430,9 +480,14 @@ def write_version(stream_name, version): """ write_message(ActivateVersionMessage(stream_name, version)) + def write_batch( - stream_name, filepath, file_format=None, - compression=None, batch_size=None, time_extracted=None + stream_name, + filepath, + file_format=None, + compression=None, + batch_size=None, + time_extracted=None, ): """Write a batch message. @@ -449,6 +504,6 @@ def write_batch( file_format=file_format, compression=compression, batch_size=batch_size, - time_extracted=time_extracted + time_extracted=time_extracted, ) ) diff --git a/singer/metadata.py b/singer/metadata.py index 41153ea..f000323 100644 --- a/singer/metadata.py +++ b/singer/metadata.py @@ -1,15 +1,24 @@ +from __future__ import annotations + + def new(): return {} + def to_map(raw_metadata): - return {tuple(md['breadcrumb']): md['metadata'] for md in raw_metadata} + return {tuple(md["breadcrumb"]): md["metadata"] for md in raw_metadata} + def to_list(compiled_metadata): - return [{'breadcrumb': k, 'metadata': v} for k, v in compiled_metadata.items()] + return [ + {"breadcrumb": k, "metadata": v} for k, v in compiled_metadata.items() + ] + def delete(compiled_metadata, breadcrumb, k): del compiled_metadata[breadcrumb][k] + def write(compiled_metadata, breadcrumb, k, val): if val is None: raise Exception() @@ -19,28 +28,43 @@ def write(compiled_metadata, breadcrumb, k, val): compiled_metadata[breadcrumb] = {k: val} return compiled_metadata + def get(compiled_metadata, breadcrumb, k): return compiled_metadata.get(breadcrumb, {}).get(k) -def get_standard_metadata(schema=None, schema_name=None, key_properties=None, - valid_replication_keys=None, replication_method=None): + +def get_standard_metadata( + schema=None, + schema_name=None, + key_properties=None, + valid_replication_keys=None, + replication_method=None, +): mdata = {} if key_properties is not None: - mdata = write(mdata, (), 'table-key-properties', key_properties) + mdata = write(mdata, (), "table-key-properties", key_properties) if replication_method: - mdata = write(mdata, (), 'forced-replication-method', replication_method) + mdata = write( + mdata, (), "forced-replication-method", replication_method + ) if valid_replication_keys is not None: - mdata = write(mdata, (), 'valid-replication-keys', valid_replication_keys) + mdata = write( + mdata, (), "valid-replication-keys", valid_replication_keys + ) if schema: - mdata = write(mdata, (), 'inclusion', 'available') + mdata = write(mdata, (), "inclusion", "available") if schema_name: - mdata = write(mdata, (), 'schema-name', schema_name) - for field_name in schema['properties'].keys(): + mdata = write(mdata, (), "schema-name", schema_name) + for field_name in schema["properties"].keys(): if key_properties and 
field_name in key_properties: - mdata = write(mdata, ('properties', field_name), 'inclusion', 'automatic') + mdata = write( + mdata, ("properties", field_name), "inclusion", "automatic" + ) else: - mdata = write(mdata, ('properties', field_name), 'inclusion', 'available') + mdata = write( + mdata, ("properties", field_name), "inclusion", "available" + ) return to_list(mdata) diff --git a/singer/metrics.py b/singer/metrics.py index 785b29e..2c3bfb8 100644 --- a/singer/metrics.py +++ b/singer/metrics.py @@ -1,4 +1,4 @@ -'''Utilities for logging and parsing metrics. +"""Utilities for logging and parsing metrics. A Tap should use this library to log structured messages about the read operations it makes. @@ -38,57 +38,61 @@ * job_timer - Emits a 'job_duration' metric to track time of asynchronous jobs. Provides "job_type" tag. -''' +""" + +from __future__ import annotations -import msgspec import re import time from collections import namedtuple + +import msgspec + from singer.logger import get_logger DEFAULT_LOG_INTERVAL = 60 class Status: - '''Constants for status codes''' - succeeded = 'succeeded' - failed = 'failed' + """Constants for status codes""" + + succeeded = "succeeded" + failed = "failed" class Metric: - '''Constants for metric names''' + """Constants for metric names""" - record_count = 'record_count' - job_duration = 'job_duration' - http_request_duration = 'http_request_duration' + record_count = "record_count" + job_duration = "job_duration" + http_request_duration = "http_request_duration" class Tag: - '''Constants for commonly used tags''' + """Constants for commonly used tags""" - endpoint = 'endpoint' - job_type = 'job_type' - http_status_code = 'http_status_code' - status = 'status' + endpoint = "endpoint" + job_type = "job_type" + http_status_code = "http_status_code" + status = "status" - -Point = namedtuple('Point', ['metric_type', 'metric', 'value', 'tags']) +Point = namedtuple("Point", ["metric_type", "metric", "value", "tags"]) def log(logger, point): - '''Log a single data point.''' + """Log a single data point.""" result = { - 'type': point.metric_type, - 'metric': point.metric, - 'value': point.value, - 'tags': point.tags + "type": point.metric_type, + "metric": point.metric, + "value": point.value, + "tags": point.tags, } - logger.info('METRIC: %s', msgspec.json.encode(result)) + logger.info("METRIC: %s", msgspec.json.encode(result)) -class Counter(): - '''Increments a counter metric. +class Counter: + """Increments a counter metric. When you use Counter as a context manager, it will automatically emit points for a "counter" metric periodically and also when the context @@ -111,7 +115,7 @@ class Counter(): } } - ''' + """ def __init__(self, metric, tags=None, log_interval=DEFAULT_LOG_INTERVAL): self.metric = metric @@ -126,13 +130,13 @@ def __enter__(self): return self def increment(self, amount=1): - '''Increments value by the specified amount.''' + """Increments value by the specified amount.""" self.value += amount if self._ready_to_log(): self._pop() def _pop(self): - log(self.logger, Point('counter', self.metric, self.value, self.tags)) + log(self.logger, Point("counter", self.metric, self.value, self.tags)) self.value = 0 self.last_log_time = time.time() @@ -143,8 +147,8 @@ def _ready_to_log(self): return time.time() - self.last_log_time > self.log_interval -class Timer(): # pylint: disable=too-few-public-methods - '''Produces metrics about the duration of operations. 
+class Timer: # pylint: disable=too-few-public-methods + """Produces metrics about the duration of operations. You use a Timer as a context manager wrapping around some operation. When the context exits, the Timer emits a metric that indicates how @@ -169,7 +173,8 @@ class Timer(): # pylint: disable=too-few-public-methods } }, - ''' + """ + def __init__(self, metric, tags): self.metric = metric self.tags = tags if tags else {} @@ -181,7 +186,7 @@ def __enter__(self): return self def elapsed(self): - '''Return elapsed time''' + """Return elapsed time""" return time.time() - self.start_time def __exit__(self, exc_type, exc_value, traceback): @@ -190,17 +195,17 @@ def __exit__(self, exc_type, exc_value, traceback): self.tags[Tag.status] = Status.succeeded else: self.tags[Tag.status] = Status.failed - log(self.logger, Point('timer', self.metric, self.elapsed(), self.tags)) + log(self.logger, Point("timer", self.metric, self.elapsed(), self.tags)) def record_counter(endpoint=None, log_interval=DEFAULT_LOG_INTERVAL): - '''Use for counting records retrieved from the source. + """Use for counting records retrieved from the source. with singer.metrics.record_counter(endpoint="users") as counter: for record in my_records: # Do something with the record counter.increment() - ''' + """ tags = {} if endpoint: tags[Tag.endpoint] = endpoint @@ -208,11 +213,11 @@ def record_counter(endpoint=None, log_interval=DEFAULT_LOG_INTERVAL): def http_request_timer(endpoint): - '''Use for timing HTTP requests to an endpoint + """Use for timing HTTP requests to an endpoint with singer.metrics.http_request_timer("users") as timer: # Make a request - ''' + """ tags = {} if endpoint: tags[Tag.endpoint] = endpoint @@ -220,11 +225,11 @@ def http_request_timer(endpoint): def job_timer(job_type=None): - '''Use for timing asynchronous jobs + """Use for timing asynchronous jobs with singer.metrics.job_timer(job_type="users") as timer: # Make a request - ''' + """ tags = {} if job_type: tags[Tag.job_type] = job_type @@ -232,17 +237,18 @@ def job_timer(job_type=None): def parse(line): - '''Parse a Point from a log line and return it, or None if no data point.''' - match = re.match(r'^INFO METRIC: (.*)$', line) + """Parse a Point from a log line and return it, or None if no data point.""" + match = re.match(r"^INFO METRIC: (.*)$", line) if match: json_str = match.group(1) try: raw = msgspec.json.decode(json_str) return Point( - metric_type=raw.get('type'), - metric=raw.get('metric'), - value=raw.get('value'), - tags=raw.get('tags')) + metric_type=raw.get("type"), + metric=raw.get("metric"), + value=raw.get("value"), + tags=raw.get("tags"), + ) except Exception as exc: # pylint: disable=broad-except - get_logger().warning('Error parsing metric: %s', exc) + get_logger().warning("Error parsing metric: %s", exc) return None diff --git a/singer/requests.py b/singer/requests.py index 295c2ec..5d2afd9 100644 --- a/singer/requests.py +++ b/singer/requests.py @@ -1,6 +1,8 @@ +from __future__ import annotations + + def giveup_on_http_4xx_except_429(error): response = error.response if response is None: return False - return not (response.status_code == 429 or - response.status_code >= 500) + return not (response.status_code == 429 or response.status_code >= 500) diff --git a/singer/schema.py b/singer/schema.py index 1dfe076..706cf94 100644 --- a/singer/schema.py +++ b/singer/schema.py @@ -1,51 +1,75 @@ # pylint: disable=redefined-builtin, too-many-arguments, invalid-name -'''Provides an object model for JSON Schema''' +"""Provides an 
object model for JSON Schema""" + +from __future__ import annotations import msgspec # These are standard keys defined in the JSON Schema spec STANDARD_KEYS = [ - 'title', - 'selected', - 'inclusion', - 'description', - 'minimum', - 'maximum', - 'exclusiveMinimum', - 'exclusiveMaximum', - 'multipleOf', - 'maxLength', - 'minLength', - 'format', - 'type', - 'default', - 'required', - 'enum', - 'pattern', - 'contentMediaType', - 'contentEncoding', - 'additionalProperties', - 'anyOf', - 'patternProperties', - 'allOf', + "title", + "selected", + "inclusion", + "description", + "minimum", + "maximum", + "exclusiveMinimum", + "exclusiveMaximum", + "multipleOf", + "maxLength", + "minLength", + "format", + "type", + "default", + "required", + "enum", + "pattern", + "contentMediaType", + "contentEncoding", + "additionalProperties", + "anyOf", + "patternProperties", + "allOf", ] -class Schema(): # pylint: disable=too-many-instance-attributes - '''Object model for JSON Schema. +class Schema: # pylint: disable=too-many-instance-attributes + """Object model for JSON Schema. Tap and Target authors may find this to be more convenient than working directly with JSON Schema data structures. - ''' + """ # pylint: disable=too-many-locals - def __init__(self, type=None, default=None, format=None, properties=None, items=None, - selected=None, inclusion=None, description=None, minimum=None, - maximum=None, exclusiveMinimum=None, exclusiveMaximum=None, - multipleOf=None, maxLength=None, minLength=None, additionalProperties=None, - anyOf=None, allOf=None, patternProperties=None, required=None, enum=None, - title=None, pattern=None, contentMediaType=None, contentEncoding=None): + def __init__( + self, + type=None, + default=None, + format=None, + properties=None, + items=None, + selected=None, + inclusion=None, + description=None, + minimum=None, + maximum=None, + exclusiveMinimum=None, + exclusiveMaximum=None, + multipleOf=None, + maxLength=None, + minLength=None, + additionalProperties=None, + anyOf=None, + allOf=None, + patternProperties=None, + required=None, + enum=None, + title=None, + pattern=None, + contentMediaType=None, + contentEncoding=None, + ): self.type = type self.default = default @@ -74,30 +98,27 @@ def __init__(self, type=None, default=None, format=None, properties=None, items= self.contentEncoding = contentEncoding def __str__(self): - return msgspec.json.encode(self.to_dict()).decode('utf-8') + return msgspec.json.encode(self.to_dict()).decode("utf-8") def __repr__(self): - pairs = [k + '=' + repr(v) for k, v in self.__dict__.items()] - args = ', '.join(pairs) - return 'Schema(' + args + ')' + pairs = [k + "=" + repr(v) for k, v in self.__dict__.items()] + args = ", ".join(pairs) + return "Schema(" + args + ")" def __eq__(self, other): return self.__dict__ == other.__dict__ def to_dict(self): - '''Return the raw JSON Schema as a (possibly nested) dict.''' + """Return the raw JSON Schema as a (possibly nested) dict.""" result = {} if self.properties is not None: - result['properties'] = { - k: v.to_dict() - for k, v - in self.properties.items() # pylint: disable=no-member - } - + result["properties"] = { + k: v.to_dict() for k, v in self.properties.items() + } # pylint: disable=no-member if self.items is not None: - result['items'] = self.items.to_dict() # pylint: disable=no-member + result["items"] = self.items.to_dict() # pylint: disable=no-member for key in STANDARD_KEYS: if self.__dict__.get(key) is not None: @@ -107,21 +128,21 @@ def to_dict(self): @classmethod def from_dict(cls, data, 
**schema_defaults): - '''Initialize a Schema object based on the JSON Schema structure. + """Initialize a Schema object based on the JSON Schema structure. :param schema_defaults: The default values to the Schema - constructor.''' + constructor.""" kwargs = schema_defaults.copy() - properties = data.get('properties') - items = data.get('items') + properties = data.get("properties") + items = data.get("items") if properties is not None: - kwargs['properties'] = { + kwargs["properties"] = { k: Schema.from_dict(v, **schema_defaults) for k, v in properties.items() } if items is not None: - kwargs['items'] = Schema.from_dict(items, **schema_defaults) + kwargs["items"] = Schema.from_dict(items, **schema_defaults) for key in STANDARD_KEYS: if key in data: kwargs[key] = data[key] diff --git a/singer/statediff.py b/singer/statediff.py index bc21fd5..b8b13a2 100644 --- a/singer/statediff.py +++ b/singer/statediff.py @@ -1,16 +1,19 @@ +from __future__ import annotations + import collections # Named tuples for holding add, change, and remove operations -Add = collections.namedtuple('Add', ['path', 'newval']) -Change = collections.namedtuple('Change', ['path', 'oldval', 'newval']) -Remove = collections.namedtuple('Remove', ['path', 'oldval']) +Add = collections.namedtuple("Add", ["path", "newval"]) +Change = collections.namedtuple("Change", ["path", "oldval", "newval"]) +Remove = collections.namedtuple("Remove", ["path", "oldval"]) + def paths(data, base=None): - '''Walk a data structure and return a list of (path, value) tuples, where + """Walk a data structure and return a list of (path, value) tuples, where each path is the path to a leaf node in the data structure and the value is the value it points to. Each path will be a tuple. - ''' + """ if base is None: base = () @@ -28,8 +31,9 @@ def paths(data, base=None): return result + def diff(oldstate, newstate): - '''Compare two states, returning a list of Add, Change, and Remove + """Compare two states, returning a list of Add, Change, and Remove objects. Add(path, newval) means path exists in newstate but not oldstate and @@ -42,7 +46,7 @@ def diff(oldstate, newstate): Remove(path, oldval) means the path exists in oldstate but not in newstate, and the value in oldstate is oldval. - ''' + """ # Convert oldstate and newstate from a deeply nested dict into a # single-level dict, mapping a path to a value. 
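
For reference, a minimal usage sketch (not part of the patch) of what statediff.diff() yields for two hand-written Singer state dicts; the stream names and bookmark values below are invented for illustration:

    from singer import statediff

    old_state = {"bookmarks": {"users": {"updated_at": "2024-01-01T00:00:00Z"}}}
    new_state = {
        "bookmarks": {
            "users": {"updated_at": "2024-02-01T00:00:00Z"},   # value changed
            "orders": {"updated_at": "2024-02-01T00:00:00Z"},  # path added
        }
    }

    # Expected: a Change for ('bookmarks', 'users', 'updated_at') and an Add
    # for ('bookmarks', 'orders', 'updated_at'); a Remove would only appear
    # for a path present in old_state but missing from new_state.
    for op in statediff.diff(old_state, new_state):
        print(op)
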
@@ -60,7 +64,7 @@ def diff(oldstate, newstate): if path in olddict: if path in newdict: if olddict[path] == newdict[path]: - pass # Don't emit anything if values are the same + pass # Don't emit anything if values are the same else: result.append(Change(path, olddict[path], newdict[path])) else: diff --git a/singer/transform.py b/singer/transform.py index 7446023..f711fec 100644 --- a/singer/transform.py +++ b/singer/transform.py @@ -1,19 +1,24 @@ +from __future__ import annotations + import datetime import logging import re -#from jsonschema import RefResolver + +# from jsonschema import RefResolver from referencing import Registry from referencing.jsonschema import DRAFT202012 import singer.metadata from singer.logger import get_logger -from singer.utils import (strftime, strptime_to_utc) +from singer.utils import strftime, strptime_to_utc LOGGER = get_logger() -NO_INTEGER_DATETIME_PARSING = 'no-integer-datetime-parsing' -UNIX_SECONDS_INTEGER_DATETIME_PARSING = 'unix-seconds-integer-datetime-parsing' -UNIX_MILLISECONDS_INTEGER_DATETIME_PARSING = 'unix-milliseconds-integer-datetime-parsing' +NO_INTEGER_DATETIME_PARSING = "no-integer-datetime-parsing" +UNIX_SECONDS_INTEGER_DATETIME_PARSING = "unix-seconds-integer-datetime-parsing" +UNIX_MILLISECONDS_INTEGER_DATETIME_PARSING = ( + "unix-milliseconds-integer-datetime-parsing" +) VALID_DATETIME_FORMATS = [ NO_INTEGER_DATETIME_PARSING, @@ -26,22 +31,28 @@ def string_to_datetime(value): try: return strftime(strptime_to_utc(value)) except Exception as ex: - LOGGER.warning('%s, (%s)', ex, value) + LOGGER.warning("%s, (%s)", ex, value) return None def unix_milliseconds_to_datetime(value): - return strftime(datetime.datetime.fromtimestamp(float(value) / 1000.0, datetime.timezone.utc)) + return strftime( + datetime.datetime.fromtimestamp( + float(value) / 1000.0, datetime.timezone.utc + ) + ) def unix_seconds_to_datetime(value): - return strftime(datetime.datetime.fromtimestamp(int(value), datetime.timezone.utc)) + return strftime( + datetime.datetime.fromtimestamp(int(value), datetime.timezone.utc) + ) class SchemaMismatch(Exception): def __init__(self, errors): if not errors: - msg = 'An error occured during transform that was not a schema mismatch' + msg = "An error occured during transform that was not a schema mismatch" else: estrs = [e.tostr() for e in errors] @@ -49,13 +60,15 @@ def __init__(self, errors): super().__init__(msg) + class SchemaKey: - ref = '$ref' - items = 'items' - properties = 'properties' - pattern_properties = 'patternProperties' - any_of = 'anyOf' - all_of = 'allOf' + ref = "$ref" + items = "items" + properties = "properties" + pattern_properties = "patternProperties" + any_of = "anyOf" + all_of = "allOf" + class Error: def __init__(self, path, data, schema=None, logging_level=logging.INFO): @@ -65,24 +78,26 @@ def __init__(self, path, data, schema=None, logging_level=logging.INFO): self.logging_level = logging_level def tostr(self): - path = '.'.join(map(str, self.path)) + path = ".".join(map(str, self.path)) if self.schema: if self.logging_level >= logging.INFO: - msg = f'data does not match {self.schema}' + msg = f"data does not match {self.schema}" else: - msg = f'does not match {self.schema}' + msg = f"does not match {self.schema}" else: - msg = 'not in schema' + msg = "not in schema" if self.logging_level >= logging.INFO: - output = f'{path}: {msg}' + output = f"{path}: {msg}" else: - output = f'{path}: {self.data} {msg}' + output = f"{path}: {self.data} {msg}" return output class Transformer: - def __init__(self, 
integer_datetime_fmt=NO_INTEGER_DATETIME_PARSING, pre_hook=None): + def __init__( + self, integer_datetime_fmt=NO_INTEGER_DATETIME_PARSING, pre_hook=None + ): self.integer_datetime_fmt = integer_datetime_fmt self.pre_hook = pre_hook self.removed = set() @@ -91,20 +106,23 @@ def __init__(self, integer_datetime_fmt=NO_INTEGER_DATETIME_PARSING, pre_hook=No def log_warning(self): if self.filtered: - LOGGER.debug('Filtered %s paths during transforms ' - 'as they were unsupported or not selected:\n\t%s', - len(self.filtered), - '\n\t'.join(sorted(self.filtered))) + LOGGER.debug( + "Filtered %s paths during transforms " + "as they were unsupported or not selected:\n\t%s", + len(self.filtered), + "\n\t".join(sorted(self.filtered)), + ) # Output list format to parse for reporting - LOGGER.debug('Filtered paths list: %s', - sorted(self.filtered)) + LOGGER.debug("Filtered paths list: %s", sorted(self.filtered)) if self.removed: - LOGGER.debug('Removed %s paths during transforms:\n\t%s', - len(self.removed), - '\n\t'.join(sorted(self.removed))) + LOGGER.debug( + "Removed %s paths during transforms:\n\t%s", + len(self.removed), + "\n\t".join(sorted(self.removed)), + ) # Output list format to parse for reporting - LOGGER.debug('Removed paths list: %s', sorted(self.removed)) + LOGGER.debug("Removed paths list: %s", sorted(self.removed)) def __enter__(self): return self @@ -115,9 +133,13 @@ def __exit__(self, *args): def filter_data_by_metadata(self, data, metadata): if isinstance(data, dict) and metadata: for field_name in list(data.keys()): - selected = singer.metadata.get(metadata, ('properties', field_name), 'selected') - inclusion = singer.metadata.get(metadata, ('properties', field_name), 'inclusion') - if inclusion == 'automatic': + selected = singer.metadata.get( + metadata, ("properties", field_name), "selected" + ) + inclusion = singer.metadata.get( + metadata, ("properties", field_name), "inclusion" + ) + if inclusion == "automatic": continue if selected is False: @@ -126,7 +148,7 @@ def filter_data_by_metadata(self, data, metadata): # didn't select it. self.filtered.add(field_name) - if inclusion == 'unsupported': + if inclusion == "unsupported": data.pop(field_name, None) # Track that the field was filtered because the tap # declared it as unsupported. 
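
For reference, a small sketch (not part of the patch) of how field-level metadata drives filter_data_by_metadata(); the field names and record below are invented for illustration:

    from singer.metadata import to_map
    from singer.transform import Transformer

    # Field-level metadata: 'id' is automatic (always kept), 'email' is
    # available but was not selected, so it should be filtered out.
    mdata = to_map([
        {"breadcrumb": ["properties", "id"],
         "metadata": {"inclusion": "automatic"}},
        {"breadcrumb": ["properties", "email"],
         "metadata": {"inclusion": "available", "selected": False}},
    ])

    record = {"id": 1, "email": "mary@example.com"}

    # filter_data_by_metadata mutates the record in place, dropping
    # unselected and unsupported fields while keeping automatic ones.
    with Transformer() as transformer:
        transformer.filter_data_by_metadata(record, mdata)

    print(record)  # {'id': 1} -- 'email' dropped because it was not selected
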
@@ -144,39 +166,45 @@ def transform(self, data, schema, metadata=None): return transformed_data def transform_recur(self, data, schema, path): - if 'anyOf' in schema: + if "anyOf" in schema: return self._transform_anyof(data, schema, path) - if 'type' not in schema: + if "type" not in schema: # indicates no typing information so don't bother transforming it return True, data - types = schema['type'] + types = schema["type"] if not isinstance(types, list): types = [types] - if 'null' in types: - types.remove('null') - types.append('null') + if "null" in types: + types.remove("null") + types.append("null") for typ in types: success, transformed_data = self._transform(data, typ, schema, path) if success: return success, transformed_data - else: # pylint: disable=useless-else-on-loop + else: # pylint: disable=useless-else-on-loop # exhaused all types and didn't return, so we failed :-( - self.errors.append(Error(path, data, schema, logging_level=LOGGER.level)) + self.errors.append( + Error(path, data, schema, logging_level=LOGGER.level) + ) return False, None def _transform_anyof(self, data, schema, path): - subschemas = schema['anyOf'] + subschemas = schema["anyOf"] for subschema in subschemas: - success, transformed_data = self.transform_recur(data, subschema, path) + success, transformed_data = self.transform_recur( + data, subschema, path + ) if success: return success, transformed_data - else: # pylint: disable=useless-else-on-loop + else: # pylint: disable=useless-else-on-loop # exhaused all schemas and didn't return, so we failed :-( - self.errors.append(Error(path, data, schema, logging_level=LOGGER.level)) + self.errors.append( + Error(path, data, schema, logging_level=LOGGER.level) + ) return False, None def _transform_object(self, data, schema, path, pattern_properties): @@ -194,12 +222,16 @@ def _transform_object(self, data, schema, path, pattern_properties): successes = [] for key, value in data.items(): # patternProperties are a map of {"pattern": { schema...}} - pattern_schemas = [schema for pattern, schema - in (pattern_properties or {}).items() - if re.match(pattern, key)] + pattern_schemas = [ + schema + for pattern, schema in (pattern_properties or {}).items() + if re.match(pattern, key) + ] if key in schema or pattern_schemas: - sub_schema = schema.get(key, {'anyOf': pattern_schemas}) - success, subdata = self.transform_recur(value, sub_schema, path + [key]) + sub_schema = schema.get(key, {"anyOf": pattern_schemas}) + success, subdata = self.transform_recur( + value, sub_schema, path + [key] + ) successes.append(success) result[key] = subdata else: @@ -208,7 +240,7 @@ def _transform_object(self, data, schema, path, pattern_properties): # with discovery but rather than failing the run because # new data was added we'd rather continue the sync and # allow customers to indicate that they want the new data. 
- self.removed.add('.'.join(map(str, path + [key]))) + self.removed.add(".".join(map(str, path + [key]))) return all(successes), result @@ -228,17 +260,20 @@ def _transform_array(self, data, schema, path): return all(successes), result def _transform_datetime(self, value): - if value is None or value == '': - return None # Short circuit in the case of null or empty string + if value is None or value == "": + return None # Short circuit in the case of null or empty string if self.integer_datetime_fmt not in VALID_DATETIME_FORMATS: - raise Exception('Invalid integer datetime parsing option') + raise Exception("Invalid integer datetime parsing option") if self.integer_datetime_fmt == NO_INTEGER_DATETIME_PARSING: return string_to_datetime(value) try: - if self.integer_datetime_fmt == UNIX_SECONDS_INTEGER_DATETIME_PARSING: + if ( + self.integer_datetime_fmt + == UNIX_SECONDS_INTEGER_DATETIME_PARSING + ): return unix_seconds_to_datetime(value) return unix_milliseconds_to_datetime(value) @@ -249,30 +284,32 @@ def _transform(self, data, typ, schema, path): if self.pre_hook: data = self.pre_hook(data, typ, schema) - if typ == 'null': - if data is None or data == '': + if typ == "null": + if data is None or data == "": return True, None return False, None - if schema.get('format') == 'date-time': + if schema.get("format") == "date-time": data = self._transform_datetime(data) if data is None: return False, None return True, data - if typ == 'object': + if typ == "object": # Objects do not necessarily specify properties - return self._transform_object(data, - schema.get('properties', {}), - path, - schema.get(SchemaKey.pattern_properties)) + return self._transform_object( + data, + schema.get("properties", {}), + path, + schema.get(SchemaKey.pattern_properties), + ) - if typ == 'array': - return self._transform_array(data, schema['items'], path) + if typ == "array": + return self._transform_array(data, schema["items"], path) - if typ == 'string': + if typ == "string": if data is not None: try: return True, str(data) @@ -281,26 +318,26 @@ def _transform(self, data, typ, schema, path): else: return False, None - if typ == 'integer': + if typ == "integer": if isinstance(data, str): - data = data.replace(',', '') + data = data.replace(",", "") try: return True, int(data) except Exception: return False, None - if typ == 'number': + if typ == "number": if isinstance(data, str): - data = data.replace(',', '') + data = data.replace(",", "") try: return True, float(data) except Exception: return False, None - if typ == 'boolean': - if isinstance(data, str) and data.lower() == 'false': + if typ == "boolean": + if isinstance(data, str) and data.lower() == "false": return True, False try: @@ -311,8 +348,13 @@ def _transform(self, data, typ, schema, path): return False, None -def transform(data, schema, integer_datetime_fmt=NO_INTEGER_DATETIME_PARSING, - pre_hook=None, metadata=None): +def transform( + data, + schema, + integer_datetime_fmt=NO_INTEGER_DATETIME_PARSING, + pre_hook=None, + metadata=None, +): """ Applies schema (and integer_datetime_fmt, if supplied) to data, transforming each field in data to the type specified in schema. 
If no type matches a @@ -333,12 +375,16 @@ def transform(data, schema, integer_datetime_fmt=NO_INTEGER_DATETIME_PARSING, transformer = Transformer(integer_datetime_fmt, pre_hook) return transformer.transform(data, schema, metadata=metadata) -def _transform_datetime(value, integer_datetime_fmt=NO_INTEGER_DATETIME_PARSING): + +def _transform_datetime( + value, integer_datetime_fmt=NO_INTEGER_DATETIME_PARSING +): transformer = Transformer(integer_datetime_fmt) return transformer._transform_datetime(value) + def resolve_schema_references(schema, refs=None): - '''Resolves and replaces json-schema $refs with the appropriate dict. + """Resolves and replaces json-schema $refs with the appropriate dict. Recursively walks the given schema dict, converting every instance of $ref in a 'properties' structure with a resolved dict. @@ -353,11 +399,11 @@ def resolve_schema_references(schema, refs=None): Returns: schema - ''' -# refs = refs or {} -# return _resolve_schema_references(schema, RefResolver('', schema, store=refs)) + """ + # refs = refs or {} + # return _resolve_schema_references(schema, RefResolver('', schema, store=refs)) refs = refs or {} - registry: Registry = Registry() + registry: Registry = Registry() # type: ignore[annotation-unchecked] schema_resource = DRAFT202012.create_resource(schema) registry = registry.with_resource("", schema_resource) registry = registry.with_resources( @@ -367,6 +413,7 @@ def resolve_schema_references(schema, refs=None): resolver = registry.resolver() return _resolve_schema_references(schema, resolver) + def _resolve_schema_references(schema, resolver): if SchemaKey.ref in schema: reference_path = schema.pop(SchemaKey.ref, None) @@ -376,21 +423,31 @@ def _resolve_schema_references(schema, resolver): if SchemaKey.properties in schema: for k, val in schema[SchemaKey.properties].items(): - schema[SchemaKey.properties][k] = _resolve_schema_references(val, resolver) + schema[SchemaKey.properties][k] = _resolve_schema_references( + val, resolver + ) if SchemaKey.pattern_properties in schema: for k, val in schema[SchemaKey.pattern_properties].items(): - schema[SchemaKey.pattern_properties][k] = _resolve_schema_references(val, resolver) + schema[SchemaKey.pattern_properties][k] = ( + _resolve_schema_references(val, resolver) + ) if SchemaKey.items in schema: - schema[SchemaKey.items] = _resolve_schema_references(schema[SchemaKey.items], resolver) + schema[SchemaKey.items] = _resolve_schema_references( + schema[SchemaKey.items], resolver + ) if SchemaKey.any_of in schema: for i, element in enumerate(schema[SchemaKey.any_of]): - schema[SchemaKey.any_of][i] = _resolve_schema_references(element, resolver) + schema[SchemaKey.any_of][i] = _resolve_schema_references( + element, resolver + ) if SchemaKey.all_of in schema: for i, element in enumerate(schema[SchemaKey.all_of]): - schema[SchemaKey.all_of][i] = _resolve_schema_references(element, resolver) + schema[SchemaKey.all_of][i] = _resolve_schema_references( + element, resolver + ) return schema diff --git a/singer/utils.py b/singer/utils.py index bad6228..b091b89 100644 --- a/singer/utils.py +++ b/singer/utils.py @@ -1,25 +1,29 @@ +from __future__ import annotations + import argparse import collections import datetime import functools -import msgspec import time from warnings import warn +import backoff as backoff_module import dateutil.parser +import msgspec import pytz -import backoff as backoff_module from singer.catalog import Catalog -DATETIME_PARSE = '%Y-%m-%dT%H:%M:%SZ' -DATETIME_FMT = '%04Y-%m-%dT%H:%M:%S.%fZ' 
-DATETIME_FMT_SAFE = '%Y-%m-%dT%H:%M:%S.%fZ' +DATETIME_PARSE = "%Y-%m-%dT%H:%M:%SZ" +DATETIME_FMT = "%04Y-%m-%dT%H:%M:%S.%fZ" +DATETIME_FMT_SAFE = "%Y-%m-%dT%H:%M:%S.%fZ" USE_SINGER_DECIMAL = False + def now(): return datetime.datetime.utcnow().replace(tzinfo=pytz.UTC) + def strptime_with_tz(dtime): d_object = dateutil.parser.parse(dtime) if d_object.tzinfo is None: @@ -27,6 +31,7 @@ def strptime_with_tz(dtime): return d_object + def strptime(dtime): """DEPRECATED Use strptime_to_utc instead. @@ -52,12 +57,13 @@ def strptime(dtime): Traceback (most recent call last): ... ValueError: time data '2018-01-01T00:00:00.000000Z' does not match format '%Y-%m-%dT%H:%M:%SZ' - """ + """ # noqa: E501 - warn('Use strptime_to_utc instead', DeprecationWarning, stacklevel=2) + warn("Use strptime_to_utc instead", DeprecationWarning, stacklevel=2) return datetime.datetime.strptime(dtime, DATETIME_PARSE) + def strptime_to_utc(dtimestr): d_object = dateutil.parser.parse(dtimestr) if d_object.tzinfo is None: @@ -65,20 +71,22 @@ def strptime_to_utc(dtimestr): return d_object.astimezone(tz=pytz.UTC) + def strftime(dtime, format_str=DATETIME_FMT): if dtime.utcoffset() != datetime.timedelta(0): - raise Exception('datetime must be pegged at UTC tzoneinfo') + raise Exception("datetime must be pegged at UTC tzoneinfo") dt_str = None try: dt_str = dtime.strftime(format_str) - if dt_str.startswith('4Y'): + if dt_str.startswith("4Y"): dt_str = dtime.strftime(DATETIME_FMT_SAFE) except ValueError: dt_str = dtime.strftime(DATETIME_FMT_SAFE) return dt_str + def ratelimit(limit, every): def limitdecorator(func): times = collections.deque() @@ -102,11 +110,11 @@ def wrapper(*args, **kwargs): def chunk(array, num): for i in range(0, len(array), num): - yield array[i:i + num] + yield array[i : i + num] # noqa: E203 def load_json(path): - with open(path, encoding='utf-8') as fil: + with open(path, encoding="utf-8") as fil: return msgspec.json.decode(fil.read()) @@ -125,7 +133,7 @@ def update_state(state, entity, dtime): def parse_args(required_config_keys): - '''Parse standard command-line args. + """Parse standard command-line args. Parses the command-line arguments mentioned in the SPEC and the BEST_PRACTICES documents: @@ -139,51 +147,45 @@ def parse_args(required_config_keys): Returns the parsed args object from argparse. For each argument that point to JSON files (config, state, properties), we will automatically load and parse the JSON file. 
- ''' + """ parser = argparse.ArgumentParser() - parser.add_argument( - '-c', '--config', - help='Config file', - required=True) + parser.add_argument("-c", "--config", help="Config file", required=True) - parser.add_argument( - '-s', '--state', - help='State file') + parser.add_argument("-s", "--state", help="State file") parser.add_argument( - '-p', '--properties', - help='Property selections: DEPRECATED, Please use --catalog instead') + "-p", + "--properties", + help="Property selections: DEPRECATED, Please use --catalog instead", + ) - parser.add_argument( - '--catalog', - help='Catalog file') + parser.add_argument("--catalog", help="Catalog file") parser.add_argument( - '-d', '--discover', - action='store_true', - help='Do schema discovery') + "-d", "--discover", action="store_true", help="Do schema discovery" + ) args = parser.parse_args() if args.config: - setattr(args, 'config_path', args.config) + setattr(args, "config_path", args.config) args.config = load_json(args.config) if args.state: - setattr(args, 'state_path', args.state) + setattr(args, "state_path", args.state) args.state = load_json(args.state) else: args.state = {} if args.properties: - setattr(args, 'properties_path', args.properties) + setattr(args, "properties_path", args.properties) args.properties = load_json(args.properties) if args.catalog: - setattr(args, 'catalog_path', args.catalog) + setattr(args, "catalog_path", args.catalog) args.catalog = Catalog.load(args.catalog) check_config(args.config, required_config_keys) # Store the use_singer_decimal setting if available - use_singer_decimal = args.config.get('use_singer_decimal',False) + use_singer_decimal = args.config.get("use_singer_decimal", False) set_singer_decimal_setting(use_singer_decimal) return args @@ -192,7 +194,7 @@ def parse_args(required_config_keys): def check_config(config, required_keys): missing_keys = [key for key in required_keys if key not in config] if missing_keys: - raise Exception(f'Config is missing required keys: {missing_keys}') + raise Exception(f"Config is missing required keys: {missing_keys}") def backoff(exceptions, giveup): @@ -203,22 +205,19 @@ def backoff(exceptions, giveup): giveup is a function that accepts the exception and returns True to retry """ return backoff_module.on_exception( - backoff_module.expo, - exceptions, - max_tries=5, - giveup=giveup, - factor=2) + backoff_module.expo, exceptions, max_tries=5, giveup=giveup, factor=2 + ) def exception_is_4xx(exception): """Returns True if exception is in the 4xx range.""" - if not hasattr(exception, 'response'): + if not hasattr(exception, "response"): return False if exception.response is None: return False - if not hasattr(exception.response, 'status_code'): + if not hasattr(exception.response, "status_code"): return False return 400 <= exception.response.status_code < 500 @@ -227,6 +226,7 @@ def exception_is_4xx(exception): def handle_top_exception(logger): """A decorator that will catch exceptions and log the exception's message as a CRITICAL log.""" + def decorator(fnc): @functools.wraps(fnc) def wrapped(*args, **kwargs): @@ -235,7 +235,9 @@ def wrapped(*args, **kwargs): except Exception as exc: logger.critical(exc) raise + return wrapped + return decorator @@ -293,11 +295,11 @@ def should_sync_field(inclusion, selected, default=False): True """ # always select automatic fields - if inclusion == 'automatic': + if inclusion == "automatic": return True # never select unsupported fields - if inclusion == 'unsupported': + if inclusion == "unsupported": return False # 
at this point inclusion == "available" @@ -322,6 +324,7 @@ def get_singer_decimal_setting(): return USE_SINGER_DECIMAL + def set_singer_decimal_setting(config_singer_decimal=False): """ Updates the Singer Decimal default of True if config is enabled. @@ -333,6 +336,6 @@ def set_singer_decimal_setting(config_singer_decimal=False): Default: False """ - global USE_SINGER_DECIMAL # pylint: disable=W0603 + global USE_SINGER_DECIMAL # pylint: disable=W0603 USE_SINGER_DECIMAL = config_singer_decimal diff --git a/tests/test_bookmarks.py b/tests/test_bookmarks.py index 4902105..0ce6007 100644 --- a/tests/test_bookmarks.py +++ b/tests/test_bookmarks.py @@ -1,38 +1,50 @@ +from __future__ import annotations + import unittest + from singer import bookmarks + class TestGetBookmark(unittest.TestCase): def test_empty_state(self): empty_state = {} # Case with no value to fall back on - self.assertIsNone(bookmarks.get_bookmark(empty_state, 'some_stream', 'my_key')) + self.assertIsNone( + bookmarks.get_bookmark(empty_state, "some_stream", "my_key") + ) # Case with a given default - self.assertEqual(bookmarks.get_bookmark(empty_state, 'some_stream', 'my_key', 'default_value'), - 'default_value') + self.assertEqual( + bookmarks.get_bookmark( + empty_state, "some_stream", "my_key", "default_value" + ), + "default_value", + ) def test_empty_bookmark(self): - empty_bookmark = {'bookmarks':{}} + empty_bookmark = {"bookmarks": {}} # Case with no value to fall back on - self.assertIsNone(bookmarks.get_bookmark(empty_bookmark, 'some_stream', 'my_key')) + self.assertIsNone( + bookmarks.get_bookmark(empty_bookmark, "some_stream", "my_key") + ) # Case with a given default - self.assertEqual(bookmarks.get_bookmark(empty_bookmark, 'some_stream', 'my_key', 'default_value'), - 'default_value') + self.assertEqual( + bookmarks.get_bookmark( + empty_bookmark, "some_stream", "my_key", "default_value" + ), + "default_value", + ) def test_non_empty_state(self): - stream_id_1 = 'customers' - bookmark_key_1 = 'datetime' + stream_id_1 = "customers" + bookmark_key_1 = "datetime" bookmark_val_1 = 123456789 non_empty_state = { - 'bookmarks' : { - stream_id_1 : { - bookmark_key_1 : bookmark_val_1 - } - } + "bookmarks": {stream_id_1: {bookmark_key_1: bookmark_val_1}} } # @@ -40,34 +52,58 @@ def test_non_empty_state(self): # # Bad stream, bad key - self.assertIsNone(bookmarks.get_bookmark(non_empty_state, 'some_stream', 'my_key')) + self.assertIsNone( + bookmarks.get_bookmark(non_empty_state, "some_stream", "my_key") + ) # Good stream, bad key - self.assertIsNone(bookmarks.get_bookmark(non_empty_state, stream_id_1, 'my_key')) + self.assertIsNone( + bookmarks.get_bookmark(non_empty_state, stream_id_1, "my_key") + ) # Good stream, good key - self.assertEqual(bookmarks.get_bookmark(non_empty_state, stream_id_1, bookmark_key_1), - bookmark_val_1) + self.assertEqual( + bookmarks.get_bookmark( + non_empty_state, stream_id_1, bookmark_key_1 + ), + bookmark_val_1, + ) # # Cases with a given default # # Bad stream, bad key - self.assertEqual(bookmarks.get_bookmark(non_empty_state, 'some_stream', 'my_key', 'default_value'), - 'default_value') + self.assertEqual( + bookmarks.get_bookmark( + non_empty_state, "some_stream", "my_key", "default_value" + ), + "default_value", + ) # Bad stream, good key - self.assertEqual(bookmarks.get_bookmark(non_empty_state, 'some_stream', bookmark_key_1, 'default_value'), - 'default_value') + self.assertEqual( + bookmarks.get_bookmark( + non_empty_state, "some_stream", bookmark_key_1, "default_value" + ), + 
"default_value", + ) # Good stream, bad key - self.assertEqual(bookmarks.get_bookmark(non_empty_state, stream_id_1, 'my_key', 'default_value'), - 'default_value') + self.assertEqual( + bookmarks.get_bookmark( + non_empty_state, stream_id_1, "my_key", "default_value" + ), + "default_value", + ) # Good stream, good key - self.assertEqual(bookmarks.get_bookmark(non_empty_state, stream_id_1, bookmark_key_1, 'default_value'), - bookmark_val_1) + self.assertEqual( + bookmarks.get_bookmark( + non_empty_state, stream_id_1, bookmark_key_1, "default_value" + ), + bookmark_val_1, + ) class TestGetOffset(unittest.TestCase): @@ -75,33 +111,39 @@ def test_empty_state(self): empty_state = {} # Case with no value to fall back on - self.assertIsNone(bookmarks.get_offset(empty_state, 'some_stream')) + self.assertIsNone(bookmarks.get_offset(empty_state, "some_stream")) # Case with a given default - self.assertEqual(bookmarks.get_offset(empty_state, 'some_stream', 'default_value'), - 'default_value') + self.assertEqual( + bookmarks.get_offset(empty_state, "some_stream", "default_value"), + "default_value", + ) def test_empty_bookmark(self): - empty_bookmark = {'bookmarks':{}} + empty_bookmark = {"bookmarks": {}} # Case with no value to fall back on - self.assertIsNone(bookmarks.get_offset(empty_bookmark, 'some_stream')) + self.assertIsNone(bookmarks.get_offset(empty_bookmark, "some_stream")) # Case with a given default - self.assertEqual(bookmarks.get_offset(empty_bookmark, 'some_stream', 'default_value'), - 'default_value') + self.assertEqual( + bookmarks.get_offset( + empty_bookmark, "some_stream", "default_value" + ), + "default_value", + ) def test_non_empty_state(self): - stream_id_1 = 'customers' - bookmark_key_1 = 'datetime' + stream_id_1 = "customers" + bookmark_key_1 = "datetime" bookmark_val_1 = 123456789 - offset_val = 'fizzy water' + offset_val = "fizzy water" non_empty_state = { - 'bookmarks' : { - stream_id_1 : { - bookmark_key_1 : bookmark_val_1, - 'offset' : offset_val + "bookmarks": { + stream_id_1: { + bookmark_key_1: bookmark_val_1, + "offset": offset_val, } } } @@ -111,23 +153,30 @@ def test_non_empty_state(self): # # Bad stream - self.assertIsNone(bookmarks.get_offset(non_empty_state, 'some_stream')) + self.assertIsNone(bookmarks.get_offset(non_empty_state, "some_stream")) # Good stream - self.assertEqual(bookmarks.get_offset(non_empty_state, stream_id_1), - offset_val) + self.assertEqual( + bookmarks.get_offset(non_empty_state, stream_id_1), offset_val + ) # # Case with a given default # # Bad stream - self.assertEqual(bookmarks.get_offset(non_empty_state, 'some_stream', 'default_value'), - 'default_value') + self.assertEqual( + bookmarks.get_offset( + non_empty_state, "some_stream", "default_value" + ), + "default_value", + ) # Good stream - self.assertEqual(bookmarks.get_offset(non_empty_state, stream_id_1, 'default_value'), - offset_val) + self.assertEqual( + bookmarks.get_offset(non_empty_state, stream_id_1, "default_value"), + offset_val, + ) class TestGetCurrentlySyncing(unittest.TestCase): @@ -138,29 +187,34 @@ def test_empty_state(self): self.assertIsNone(bookmarks.get_currently_syncing(empty_state)) # Case with a given default - self.assertEqual(bookmarks.get_currently_syncing(empty_state, 'default_value'), - 'default_value') + self.assertEqual( + bookmarks.get_currently_syncing(empty_state, "default_value"), + "default_value", + ) def test_non_empty_state(self): - stream_id_1 = 'customers' - bookmark_key_1 = 'datetime' + stream_id_1 = "customers" + bookmark_key_1 = "datetime" 
bookmark_val_1 = 123456789 - offset_val = 'fizzy water' + offset_val = "fizzy water" non_empty_state = { - 'bookmarks' : { - stream_id_1 : { - bookmark_key_1 : bookmark_val_1, - 'offset' : offset_val + "bookmarks": { + stream_id_1: { + bookmark_key_1: bookmark_val_1, + "offset": offset_val, } }, - 'currently_syncing' : stream_id_1 + "currently_syncing": stream_id_1, } # Case with no value to fall back on - self.assertEqual(bookmarks.get_currently_syncing(non_empty_state), - stream_id_1) + self.assertEqual( + bookmarks.get_currently_syncing(non_empty_state), stream_id_1 + ) # Case with a given default - self.assertEqual(bookmarks.get_currently_syncing(non_empty_state, 'default_value'), - stream_id_1) + self.assertEqual( + bookmarks.get_currently_syncing(non_empty_state, "default_value"), + stream_id_1, + ) diff --git a/tests/test_catalog.py b/tests/test_catalog.py index 8a72e1a..d5932d3 100644 --- a/tests/test_catalog.py +++ b/tests/test_catalog.py @@ -1,7 +1,10 @@ +from __future__ import annotations + import unittest -from singer.schema import Schema from singer.catalog import Catalog, CatalogEntry, write_catalog +from singer.schema import Schema + class TestWriteCatalog(unittest.TestCase): def test_write_empty_catalog(self): @@ -9,123 +12,140 @@ def test_write_empty_catalog(self): write_catalog(catalog) def test_write_catalog_with_streams(self): - catalog = Catalog([CatalogEntry(tap_stream_id='a',schema=Schema(),metadata=[])]) + catalog = Catalog( + [CatalogEntry(tap_stream_id="a", schema=Schema(), metadata=[])] + ) write_catalog(catalog) + class TestGetSelectedStreams(unittest.TestCase): def test_one_selected_stream(self): - selected_entry = CatalogEntry(tap_stream_id='a', - schema=Schema(), - metadata=[{'metadata': - {'selected': True}, - 'breadcrumb': []}]) + selected_entry = CatalogEntry( + tap_stream_id="a", + schema=Schema(), + metadata=[{"metadata": {"selected": True}, "breadcrumb": []}], + ) catalog = Catalog( - [selected_entry, - CatalogEntry(tap_stream_id='b',schema=Schema(),metadata=[]), - CatalogEntry(tap_stream_id='c',schema=Schema(),metadata=[])]) + [ + selected_entry, + CatalogEntry(tap_stream_id="b", schema=Schema(), metadata=[]), + CatalogEntry(tap_stream_id="c", schema=Schema(), metadata=[]), + ] + ) state = {} selected_streams = catalog.get_selected_streams(state) - self.assertEqual([e for e in selected_streams],[selected_entry]) + self.assertEqual([e for e in selected_streams], [selected_entry]) def test_resumes_currently_syncing_stream(self): - selected_entry_a = CatalogEntry(tap_stream_id='a', - schema=Schema(), - metadata=[{'metadata': - {'selected': True}, - 'breadcrumb': []}]) - selected_entry_c = CatalogEntry(tap_stream_id='c', - schema=Schema(), - metadata=[{'metadata': - {'selected': True}, - 'breadcrumb': []}]) + selected_entry_a = CatalogEntry( + tap_stream_id="a", + schema=Schema(), + metadata=[{"metadata": {"selected": True}, "breadcrumb": []}], + ) + selected_entry_c = CatalogEntry( + tap_stream_id="c", + schema=Schema(), + metadata=[{"metadata": {"selected": True}, "breadcrumb": []}], + ) catalog = Catalog( - [selected_entry_a, - CatalogEntry(tap_stream_id='b',schema=Schema(),metadata=[]), - selected_entry_c]) - state = {'currently_syncing': 'c'} + [ + selected_entry_a, + CatalogEntry(tap_stream_id="b", schema=Schema(), metadata=[]), + selected_entry_c, + ] + ) + state = {"currently_syncing": "c"} selected_streams = catalog.get_selected_streams(state) - self.assertEqual([e for e in selected_streams][0],selected_entry_c) + self.assertEqual([e for e in 
selected_streams][0], selected_entry_c) + class TestToDictAndFromDict(unittest.TestCase): dict_form = { - 'streams': [ + "streams": [ { - 'stream': 'users', - 'tap_stream_id': 'prod_users', - 'stream_alias': 'users_alias', - 'database_name': 'prod', - 'table_name': 'users', - 'schema': { - 'type': 'object', - 'selected': True, - 'properties': { - 'id': {'type': 'integer', 'selected': True}, - 'name': {'type': 'string', 'selected': True} - } + "stream": "users", + "tap_stream_id": "prod_users", + "stream_alias": "users_alias", + "database_name": "prod", + "table_name": "users", + "schema": { + "type": "object", + "selected": True, + "properties": { + "id": {"type": "integer", "selected": True}, + "name": {"type": "string", "selected": True}, + }, }, - 'metadata': [ + "metadata": [ { - 'metadata': { - 'metadata-key': 'metadata-value' - }, - 'breadcrumb': [ - 'properties', - 'name', + "metadata": {"metadata-key": "metadata-value"}, + "breadcrumb": [ + "properties", + "name", ], }, ], }, { - 'stream': 'orders', - 'tap_stream_id': 'prod_orders', - 'database_name': 'prod', - 'table_name': 'orders', - 'schema': { - 'type': 'object', - 'selected': True, - 'properties': { - 'id': {'type': 'integer', 'selected': True}, - 'amount': {'type': 'number', 'selected': True} - } - } - } - ] - } - - obj_form = Catalog(streams=[ - CatalogEntry( - stream='users', - tap_stream_id='prod_users', - stream_alias='users_alias', - database='prod', - table='users', - schema=Schema( - type='object', - selected=True, - properties={ - 'id': Schema(type='integer', selected=True), - 'name': Schema(type='string', selected=True)}), - metadata=[{ - 'metadata': { - 'metadata-key': 'metadata-value' + "stream": "orders", + "tap_stream_id": "prod_orders", + "database_name": "prod", + "table_name": "orders", + "schema": { + "type": "object", + "selected": True, + "properties": { + "id": {"type": "integer", "selected": True}, + "amount": {"type": "number", "selected": True}, + }, }, - 'breadcrumb': [ - 'properties', - 'name', + }, + ] + } + + obj_form = Catalog( + streams=[ + CatalogEntry( + stream="users", + tap_stream_id="prod_users", + stream_alias="users_alias", + database="prod", + table="users", + schema=Schema( + type="object", + selected=True, + properties={ + "id": Schema(type="integer", selected=True), + "name": Schema(type="string", selected=True), + }, + ), + metadata=[ + { + "metadata": {"metadata-key": "metadata-value"}, + "breadcrumb": [ + "properties", + "name", + ], + } ], - }]), - CatalogEntry( - stream='orders', - tap_stream_id='prod_orders', - database='prod', - table='orders', - schema=Schema( - type='object', - selected=True, - properties={ - 'id': Schema(type='integer', selected=True), - 'amount': Schema(type='number', selected=True)}))]) + ), + CatalogEntry( + stream="orders", + tap_stream_id="prod_orders", + database="prod", + table="orders", + schema=Schema( + type="object", + selected=True, + properties={ + "id": Schema(type="integer", selected=True), + "amount": Schema(type="number", selected=True), + }, + ), + ), + ] + ) def test_from_dict(self): self.assertEqual(self.obj_form, Catalog.from_dict(self.dict_form)) @@ -137,8 +157,11 @@ def test_to_dict(self): class TestGetStream(unittest.TestCase): def test(self): catalog = Catalog( - [CatalogEntry(tap_stream_id='a'), - CatalogEntry(tap_stream_id='b'), - CatalogEntry(tap_stream_id='c')]) - entry = catalog.get_stream('b') - self.assertEqual('b', entry.tap_stream_id) + [ + CatalogEntry(tap_stream_id="a"), + CatalogEntry(tap_stream_id="b"), + 
CatalogEntry(tap_stream_id="c"), + ] + ) + entry = catalog.get_stream("b") + self.assertEqual("b", entry.tap_stream_id) diff --git a/tests/test_metadata.py b/tests/test_metadata.py index fd97ef2..e578d15 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -1,60 +1,60 @@ -from pprint import pprint +from __future__ import annotations + import unittest + from singer.metadata import get_standard_metadata + def make_expected_metadata(base_obj, dict_of_extras): metadata_value = {**base_obj} metadata_value.update(dict_of_extras) return [ + {"metadata": metadata_value, "breadcrumb": ()}, { - 'metadata': metadata_value, - 'breadcrumb': () - }, - { - 'metadata': { - 'inclusion': 'available', + "metadata": { + "inclusion": "available", }, - 'breadcrumb': ('properties', 'id') + "breadcrumb": ("properties", "id"), }, { - 'metadata': { - 'inclusion': 'available', + "metadata": { + "inclusion": "available", }, - 'breadcrumb': ('properties', 'name') + "breadcrumb": ("properties", "name"), }, { - 'metadata': { - 'inclusion': 'available', + "metadata": { + "inclusion": "available", }, - 'breadcrumb': ('properties', 'created') - } + "breadcrumb": ("properties", "created"), + }, ] + class TestStandardMetadata(unittest.TestCase): - #maxDiff = None + # maxDiff = None def test_standard_metadata(self): # Some contants shared by a number of expected metadata objects - tap_stream_id = 'employees' - test_kp = ['id'] - test_rm = 'INCREMENTAL' - test_rk = ['id', 'created'] - metadata_kp = {'table-key-properties': ['id']} - metadata_rm = {'forced-replication-method': 'INCREMENTAL'} - metadata_rk = {'valid_replication_keys': ['id','created']} - schema_present_base_obj = {'inclusion': 'available'} + tap_stream_id = "employees" + test_kp = ["id"] + test_rm = "INCREMENTAL" + test_rk = ["id", "created"] + # metadata_kp = {"table-key-properties": ["id"]} + # metadata_rm = {"forced-replication-method": "INCREMENTAL"} + # metadata_rk = {"valid_replication_keys": ["id", "created"]} + schema_present_base_obj = {"inclusion": "available"} test_schema = { - 'type': ['null', 'object'], - 'additionalProperties': False, - 'properties': { - 'id': {'type': ['null', 'string']}, - 'name': {'type': ['null', 'string']}, - 'created': {'type': ['null', 'string'], - 'format': 'date-time'}, - } + "type": ["null", "object"], + "additionalProperties": False, + "properties": { + "id": {"type": ["null", "string"]}, + "name": {"type": ["null", "string"]}, + "created": {"type": ["null", "string"], "format": "date-time"}, + }, } # test_variables is a list of tuples, where the first element is a @@ -63,241 +63,251 @@ def test_standard_metadata(self): test_variables = [ ( { - 'schema': test_schema, - 'schema_name': tap_stream_id, - 'key_properties': None, - 'replication_method': None, - 'valid_replication_keys': None + "schema": test_schema, + "schema_name": tap_stream_id, + "key_properties": None, + "replication_method": None, + "valid_replication_keys": None, }, make_expected_metadata( schema_present_base_obj, - {'schema-name': tap_stream_id,} - ) + { + "schema-name": tap_stream_id, + }, + ), ), ( { - 'schema': test_schema, - 'schema_name': tap_stream_id, - 'key_properties': None, - 'replication_method': None, - 'valid_replication_keys': test_rk + "schema": test_schema, + "schema_name": tap_stream_id, + "key_properties": None, + "replication_method": None, + "valid_replication_keys": test_rk, }, make_expected_metadata( schema_present_base_obj, - {'valid_replication_keys': ['id','created'], - 'schema-name':tap_stream_id} - ) + { + 
"valid_replication_keys": ["id", "created"], + "schema-name": tap_stream_id, + }, + ), ), ( { - 'schema': test_schema, - 'schema_name': tap_stream_id, - 'key_properties': None, - 'replication_method': test_rm, - 'valid_replication_keys': None + "schema": test_schema, + "schema_name": tap_stream_id, + "key_properties": None, + "replication_method": test_rm, + "valid_replication_keys": None, }, make_expected_metadata( schema_present_base_obj, - {'forced-replication-method': 'INCREMENTAL', - 'schema-name':tap_stream_id} - ) + { + "forced-replication-method": "INCREMENTAL", + "schema-name": tap_stream_id, + }, + ), ), ( { - 'schema': test_schema, - 'schema_name': tap_stream_id, - 'key_properties': None, - 'replication_method': test_rm, - 'valid_replication_keys': test_rk + "schema": test_schema, + "schema_name": tap_stream_id, + "key_properties": None, + "replication_method": test_rm, + "valid_replication_keys": test_rk, }, make_expected_metadata( schema_present_base_obj, - {'valid_replication_keys': ['id','created'], - 'forced-replication-method': 'INCREMENTAL', - 'schema-name':tap_stream_id} - ) + { + "valid_replication_keys": ["id", "created"], + "forced-replication-method": "INCREMENTAL", + "schema-name": tap_stream_id, + }, + ), ), ( { - 'schema': test_schema, - 'schema_name': tap_stream_id, - 'key_properties': test_kp, - 'replication_method': None, - 'valid_replication_keys': None + "schema": test_schema, + "schema_name": tap_stream_id, + "key_properties": test_kp, + "replication_method": None, + "valid_replication_keys": None, }, make_expected_metadata( schema_present_base_obj, - {'table-key-properties': ['id'], - 'schema-name':tap_stream_id} - ) + { + "table-key-properties": ["id"], + "schema-name": tap_stream_id, + }, + ), ), ( { - 'schema': test_schema, - 'schema_name': tap_stream_id, - 'key_properties': test_kp, - 'replication_method': None, - 'valid_replication_keys': test_rk + "schema": test_schema, + "schema_name": tap_stream_id, + "key_properties": test_kp, + "replication_method": None, + "valid_replication_keys": test_rk, }, make_expected_metadata( - schema_present_base_obj, - {'table-key-properties': ['id'], - 'valid_replication_keys': ['id','created'], - 'schema-name':tap_stream_id} - ) + { + "table-key-properties": ["id"], + "valid_replication_keys": ["id", "created"], + "schema-name": tap_stream_id, + }, + ), ), ( { - 'schema': test_schema, - 'schema_name': tap_stream_id, - 'key_properties': test_kp, - 'replication_method': test_rm, - 'valid_replication_keys': None + "schema": test_schema, + "schema_name": tap_stream_id, + "key_properties": test_kp, + "replication_method": test_rm, + "valid_replication_keys": None, }, make_expected_metadata( schema_present_base_obj, - {'table-key-properties': ['id'], - 'forced-replication-method': 'INCREMENTAL', - 'schema-name':tap_stream_id} - ) + { + "table-key-properties": ["id"], + "forced-replication-method": "INCREMENTAL", + "schema-name": tap_stream_id, + }, + ), ), ( { - 'schema': test_schema, - 'schema_name': tap_stream_id, - 'key_properties': test_kp, - 'replication_method': test_rm, - 'valid_replication_keys': test_rk + "schema": test_schema, + "schema_name": tap_stream_id, + "key_properties": test_kp, + "replication_method": test_rm, + "valid_replication_keys": test_rk, }, make_expected_metadata( schema_present_base_obj, - {'table-key-properties': ['id'], - 'forced-replication-method': 'INCREMENTAL', - 'valid_replication_keys': ['id','created'], - 'schema-name':tap_stream_id} - ) + { + "table-key-properties": ["id"], + 
"forced-replication-method": "INCREMENTAL", + "valid_replication_keys": ["id", "created"], + "schema-name": tap_stream_id, + }, + ), ), ( { - 'schema': None, - 'key_properties': None, - 'replication_method': None, - 'valid_replication_keys': None + "schema": None, + "key_properties": None, + "replication_method": None, + "valid_replication_keys": None, }, - [ - { - 'metadata': {}, - 'breadcrumb': [] - } - ] + [{"metadata": {}, "breadcrumb": []}], ), ( { - 'schema': None, - 'key_properties': None, - 'replication_method': None, - 'valid_replication_keys': test_rk + "schema": None, + "key_properties": None, + "replication_method": None, + "valid_replication_keys": test_rk, }, [ { - 'metadata': { - 'inclusion': 'available', - 'valid_replication_keys': ['id','created'] + "metadata": { + "inclusion": "available", + "valid_replication_keys": ["id", "created"], }, - 'breadcrumb': [] + "breadcrumb": [], } - ] + ], ), ( { - 'schema': None, - 'key_properties': None, - 'replication_method': test_rm, - 'valid_replication_keys': None + "schema": None, + "key_properties": None, + "replication_method": test_rm, + "valid_replication_keys": None, }, [ { - 'metadata': { - 'inclusion': 'available', - 'forced-replication-method': 'INCREMENTAL' + "metadata": { + "inclusion": "available", + "forced-replication-method": "INCREMENTAL", }, - 'breadcrumb': [] + "breadcrumb": [], } - ] + ], ), ( { - 'schema': None, - 'key_properties': None, - 'replication_method': test_rm, - 'valid_replication_keys': test_rk + "schema": None, + "key_properties": None, + "replication_method": test_rm, + "valid_replication_keys": test_rk, }, [ { - 'metadata': { - 'inclusion': 'available', - 'forced-replication-method': 'INCREMENTAL', - 'valid_replication_keys': ['id','created'] + "metadata": { + "inclusion": "available", + "forced-replication-method": "INCREMENTAL", + "valid_replication_keys": ["id", "created"], }, - 'breadcrumb': [] + "breadcrumb": [], } - ] + ], ), ( { - 'schema': None, - 'key_properties': test_kp, - 'replication_method': None, - 'valid_replication_keys': None + "schema": None, + "key_properties": test_kp, + "replication_method": None, + "valid_replication_keys": None, }, [ { - 'metadata': { - 'inclusion': 'available', - 'table-key-properties': ['id'], + "metadata": { + "inclusion": "available", + "table-key-properties": ["id"], }, - 'breadcrumb': [] + "breadcrumb": [], } - ] + ], ), ( { - 'schema': None, - 'key_properties': test_kp, - 'replication_method': None, - 'valid_replication_keys': test_rk + "schema": None, + "key_properties": test_kp, + "replication_method": None, + "valid_replication_keys": test_rk, }, [ { - 'metadata': { - 'inclusion': 'available', - 'table-key-properties': ['id'], - 'valid_replication_keys': ['id','created'] + "metadata": { + "inclusion": "available", + "table-key-properties": ["id"], + "valid_replication_keys": ["id", "created"], }, - 'breadcrumb': [] + "breadcrumb": [], } - ] + ], ), ( { - 'schema': None, - 'key_properties': test_kp, - 'replication_method': test_rm, - 'valid_replication_keys': test_rk + "schema": None, + "key_properties": test_kp, + "replication_method": test_rm, + "valid_replication_keys": test_rk, }, [ { - 'metadata': { - 'inclusion': 'available', - 'table-key-properties': ['id'], - 'forced-replication-method': 'INCREMENTAL', - 'valid_replication_keys': ['id','created'] + "metadata": { + "inclusion": "available", + "table-key-properties": ["id"], + "forced-replication-method": "INCREMENTAL", + "valid_replication_keys": ["id", "created"], }, - 'breadcrumb': [] + 
"breadcrumb": [], } - ] - ) + ], + ), ] for var in test_variables: @@ -311,25 +321,33 @@ def test_standard_metadata(self): self.assertIn(obj, test_value) # Test one function call where the parameters are not splat in - test_value = get_standard_metadata(test_schema, - tap_stream_id, - test_kp, - test_rk, - test_rm) + test_value = get_standard_metadata( + test_schema, tap_stream_id, test_kp, test_rk, test_rm + ) - expected_metadata = make_expected_metadata(schema_present_base_obj, - {'table-key-properties': ['id'], - 'forced-replication-method': 'INCREMENTAL', - 'valid_replication_keys': ['id','created'], - 'schema-name':tap_stream_id}) + expected_metadata = make_expected_metadata( + schema_present_base_obj, + { + "table-key-properties": ["id"], + "forced-replication-method": "INCREMENTAL", + "valid_replication_keys": ["id", "created"], + "schema-name": tap_stream_id, + }, + ) for obj in expected_metadata: if obj in test_value: self.assertIn(obj, test_value) def test_empty_key_properties_are_written(self): mdata = get_standard_metadata(key_properties=[]) - self.assertEqual(mdata, [{'breadcrumb': (), 'metadata': {'table-key-properties': []}}]) + self.assertEqual( + mdata, + [{"breadcrumb": (), "metadata": {"table-key-properties": []}}], + ) def test_empty_valid_replication_keys_are_written(self): mdata = get_standard_metadata(valid_replication_keys=[]) - self.assertEqual(mdata, [{'breadcrumb': (), 'metadata': {'valid-replication-keys': []}}]) + self.assertEqual( + mdata, + [{"breadcrumb": (), "metadata": {"valid-replication-keys": []}}], + ) diff --git a/tests/test_metrics.py b/tests/test_metrics.py index b3a579e..cf58af2 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -1,8 +1,10 @@ +from __future__ import annotations + import unittest from unittest.mock import patch + import singer.metrics as metrics -import time -import copy + class DummyException(Exception): pass @@ -14,82 +16,132 @@ def logged_points(mock): class TestRecordCounter(unittest.TestCase): - @patch('singer.metrics.log') + @patch("singer.metrics.log") def test_log_on_exit(self, log): - with metrics.record_counter('users') as counter: + with metrics.record_counter("users") as counter: counter.increment() counter.increment() self.assertEqual( - [metrics.Point('counter', 'record_count', 2, {'endpoint': 'users'})], - logged_points(log)) - - @patch('singer.metrics.log') + [ + metrics.Point( + "counter", "record_count", 2, {"endpoint": "users"} + ) + ], + logged_points(log), + ) + + @patch("singer.metrics.log") def test_incremental(self, log): - with metrics.record_counter(endpoint='users') as counter: + with metrics.record_counter(endpoint="users") as counter: counter.increment(1) counter._ready_to_log = lambda: True counter.increment(2) counter._ready_to_log = lambda: False counter.increment(5) self.assertEqual( - [metrics.Point('counter', 'record_count', 3, {'endpoint': 'users'}), - metrics.Point('counter', 'record_count', 5, {'endpoint': 'users'})], - logged_points(log)) + [ + metrics.Point( + "counter", "record_count", 3, {"endpoint": "users"} + ), + metrics.Point( + "counter", "record_count", 5, {"endpoint": "users"} + ), + ], + logged_points(log), + ) + class TestHttpRequestTimer(unittest.TestCase): - @patch('singer.metrics.log') + @patch("singer.metrics.log") def test_success(self, log): - timer = metrics.http_request_timer('users') + timer = metrics.http_request_timer("users") timer.elapsed = lambda: 0 with timer: pass got = logged_points(log) self.assertEqual( - [metrics.Point('timer', 
'http_request_duration', 0, {'endpoint': 'users', 'status': 'succeeded'})], - got) - - @patch('singer.metrics.log') + [ + metrics.Point( + "timer", + "http_request_duration", + 0, + {"endpoint": "users", "status": "succeeded"}, + ) + ], + got, + ) + + @patch("singer.metrics.log") def test_success_with_http_status_code(self, log): - with metrics.http_request_timer('users') as timer: + with metrics.http_request_timer("users") as timer: timer.elapsed = lambda: 0 timer.tags[metrics.Tag.http_status_code] = 200 self.assertEqual( - [metrics.Point('timer', 'http_request_duration', 0, {'endpoint': 'users', 'status': 'succeeded', 'http_status_code': 200})], - logged_points(log)) - - @patch('singer.metrics.log') + [ + metrics.Point( + "timer", + "http_request_duration", + 0, + { + "endpoint": "users", + "status": "succeeded", + "http_status_code": 200, + }, + ) + ], + logged_points(log), + ) + + @patch("singer.metrics.log") def test_failure(self, log): try: - with metrics.http_request_timer('users') as timer: + with metrics.http_request_timer("users") as timer: timer.elapsed = lambda: 0 timer.tags[metrics.Tag.http_status_code] = 400 - raise ValueError('foo is not bar') + raise ValueError("foo is not bar") except ValueError: pass self.assertEqual( - [metrics.Point('timer', 'http_request_duration', 0, {'endpoint': 'users', 'status': 'failed', 'http_status_code': 400})], - logged_points(log)) + [ + metrics.Point( + "timer", + "http_request_duration", + 0, + { + "endpoint": "users", + "status": "failed", + "http_status_code": 400, + }, + ) + ], + logged_points(log), + ) class TestParse(unittest.TestCase): def test_parse_with_everything(self): - point = metrics.parse('INFO METRIC: {"type": "counter", "metric": "record_count", "value": 10, "tags": {"endpoint": "users"}}') + point = metrics.parse( + 'INFO METRIC: {"type": "counter", "metric": "record_count", "value": 10, "tags": {"endpoint": "users"}}' # noqa: E501 + ) self.assertEqual( point, - metrics.Point('counter', 'record_count', 10, {'endpoint': 'users'})) + metrics.Point("counter", "record_count", 10, {"endpoint": "users"}), + ) def test_parse_without_tags(self): - point = metrics.parse('INFO METRIC: {"type": "counter", "metric": "record_count", "value": 10}') + point = metrics.parse( + 'INFO METRIC: {"type": "counter", "metric": "record_count", "value": 10}' + ) self.assertEqual( - point, - metrics.Point('counter', 'record_count', 10, None)) + point, metrics.Point("counter", "record_count", 10, None) + ) def test_parse_invalid_json_returns_none(self): - point = metrics.parse('INFO METRIC: something that is invalid }') + point = metrics.parse("INFO METRIC: something that is invalid }") self.assertIsNone(point) def test_parse_no_match(self): - point = metrics.parse('a line that is not a metric') + point = metrics.parse("a line that is not a metric") self.assertIsNone(point) diff --git a/tests/test_schema.py b/tests/test_schema.py index abf7abd..4f3c315 100644 --- a/tests/test_schema.py +++ b/tests/test_schema.py @@ -1,50 +1,44 @@ +from __future__ import annotations + import unittest from singer.schema import Schema + class TestSchema(unittest.TestCase): # Raw data structures for several schema types - string_dict = { - 'type': 'string', - 'maxLength': 32 - } + string_dict = {"type": "string", "maxLength": 32} - integer_dict = { - 'type': 'integer', - 'maximum': 1000000 - } + integer_dict = {"type": "integer", "maximum": 1000000} - array_dict = { - 'type': 'array', - 'items': integer_dict - } + array_dict = {"type": "array", "items": integer_dict} 
object_dict = { - 'type': 'object', - 'properties': { - 'a_string': string_dict, - 'an_array': array_dict - }, - 'inclusion': 'whatever', - 'additionalProperties': True, + "type": "object", + "properties": {"a_string": string_dict, "an_array": array_dict}, + "inclusion": "whatever", + "additionalProperties": True, } # Schema object forms of the same schemas as above - string_obj = Schema(type='string', maxLength=32) + string_obj = Schema(type="string", maxLength=32) - integer_obj = Schema(type='integer', maximum=1000000) + integer_obj = Schema(type="integer", maximum=1000000) - array_obj = Schema(type='array', items=integer_obj) + array_obj = Schema(type="array", items=integer_obj) - object_obj = Schema(type='object', - properties={'a_string': string_obj, - 'an_array': array_obj}, - inclusion='whatever', - additionalProperties=True) + object_obj = Schema( + type="object", + properties={"a_string": string_obj, "an_array": array_obj}, + inclusion="whatever", + additionalProperties=True, + ) def test_to_string(self): - self.assertEqual('{"maxLength":32,"type":"string"}', str(self.string_obj)) + self.assertEqual( + '{"maxLength":32,"type":"string"}', str(self.string_obj) + ) def test_string_to_dict(self): self.assertEqual(self.string_dict, self.string_obj.to_dict()) @@ -77,8 +71,13 @@ def test_repr_recursive(self): self.assertEqual(self.object_obj, eval(repr(self.object_obj))) def test_object_from_dict_with_defaults(self): - schema = Schema.from_dict(self.object_dict, inclusion='automatic') - self.assertEqual('whatever', schema.inclusion, - msg='The schema value should override the default') - self.assertEqual('automatic', schema.properties['a_string'].inclusion) - self.assertEqual('automatic', schema.properties['an_array'].items.inclusion) + schema = Schema.from_dict(self.object_dict, inclusion="automatic") + self.assertEqual( + "whatever", + schema.inclusion, + msg="The schema value should override the default", + ) + self.assertEqual("automatic", schema.properties["a_string"].inclusion) + self.assertEqual( + "automatic", schema.properties["an_array"].items.inclusion + ) diff --git a/tests/test_singer.py b/tests/test_singer.py index 3f1e76b..667e689 100644 --- a/tests/test_singer.py +++ b/tests/test_singer.py @@ -1,52 +1,66 @@ -import singer -import msgspec +from __future__ import annotations + +import decimal import unittest + import dateutil -import decimal + +import singer class TestSinger(unittest.TestCase): def test_parse_message_record_good(self): message = singer.parse_message( - '{"type": "RECORD", "record": {"name": "foo"}, "stream": "users"}') + '{"type": "RECORD", "record": {"name": "foo"}, "stream": "users"}' + ) self.assertEqual( message, - singer.RecordMessage(record={'name': 'foo'}, stream='users')) + singer.RecordMessage(record={"name": "foo"}, stream="users"), + ) def test_parse_message_record_with_version_good(self): message = singer.parse_message( - '{"type": "RECORD", "record": {"name": "foo"}, "stream": "users", "version": 2}') + '{"type": "RECORD", "record": {"name": "foo"}, "stream": "users", "version": 2}' # noqa: E501 + ) self.assertEqual( message, - singer.RecordMessage(record={'name': 'foo'}, stream='users', version=2)) + singer.RecordMessage( + record={"name": "foo"}, stream="users", version=2 + ), + ) def test_parse_message_record_naive_extraction_time(self): - with self.assertRaisesRegex(ValueError, 'must be either None or an aware datetime'): - message = singer.parse_message( - '{"type": "RECORD", "record": {"name": "foo"}, "stream": "users", "version": 2, 
"time_extracted": "1970-01-02T00:00:00"}') + with self.assertRaisesRegex( + ValueError, "must be either None or an aware datetime" + ): + message = singer.parse_message( # noqa: F841 + '{"type": "RECORD", "record": {"name": "foo"}, "stream": "users", "version": 2, "time_extracted": "1970-01-02T00:00:00"}' # noqa: E501 + ) def test_parse_message_record_aware_extraction_time(self): message = singer.parse_message( - '{"type": "RECORD", "record": {"name": "foo"}, "stream": "users", "version": 2, "time_extracted": "1970-01-02T00:00:00.000Z"}') + '{"type": "RECORD", "record": {"name": "foo"}, "stream": "users", "version": 2, "time_extracted": "1970-01-02T00:00:00.000Z"}' # noqa: E501 + ) expected = singer.RecordMessage( - record={'name': 'foo'}, - stream='users', + record={"name": "foo"}, + stream="users", version=2, - time_extracted=dateutil.parser.parse('1970-01-02T00:00:00.000Z')) + time_extracted=dateutil.parser.parse("1970-01-02T00:00:00.000Z"), + ) print(message) print(expected) self.assertEqual(message, expected) def test_extraction_time_strftime(self): - """ Test that we're not corrupting timestamps with cross platform parsing. (Test case for OSX, specifically) """ + """Test that we're not corrupting timestamps with cross platform parsing. (Test case for OSX, specifically)""" # noqa: E501 message = singer.RecordMessage( - record={'name': 'foo'}, - stream='users', + record={"name": "foo"}, + stream="users", version=2, - time_extracted=dateutil.parser.parse('1970-01-02T00:00:00.000Z')) - expected = '1970-01-02T00:00:00.000000Z' - self.assertEqual(message.asdict()['time_extracted'], expected) - + time_extracted=dateutil.parser.parse("1970-01-02T00:00:00.000Z"), + ) + expected = "1970-01-02T00:00:00.000000Z" + self.assertEqual(message.asdict()["time_extracted"], expected) def test_parse_message_record_missing_record(self): with self.assertRaises(Exception): @@ -55,36 +69,46 @@ def test_parse_message_record_missing_record(self): def test_parse_message_record_missing_stream(self): with self.assertRaises(Exception): singer.parse_message( - '{"type": "RECORD", "record": {"name": "foo"}}') + '{"type": "RECORD", "record": {"name": "foo"}}' + ) def test_parse_message_schema_good(self): - message = singer.parse_message('{"type": "SCHEMA", "stream": "users", "schema": {"type": "object", "properties": {"name": {"type": "string"}}}, "key_properties": ["name"]}') # nopep8 + message = singer.parse_message( + '{"type": "SCHEMA", "stream": "users", "schema": {"type": "object", "properties": {"name": {"type": "string"}}}, "key_properties": ["name"]}' # noqa: E501 + ) # nopep8 self.assertEqual( message, singer.SchemaMessage( - stream='users', - key_properties=['name'], - schema={'type': 'object', - 'properties': { - 'name': {'type': 'string'}}})) + stream="users", + key_properties=["name"], + schema={ + "type": "object", + "properties": {"name": {"type": "string"}}, + }, + ), + ) def test_parse_message_schema_missing_stream(self): with self.assertRaises(Exception): - message = singer.parse_message('{"type": "SCHEMA", "schema": {"type": "object", "properties": {"name": {"type": "string"}}}, "key_properties": ["name"]}') # nopep8 + message = singer.parse_message( # noqa: F841 + '{"type": "SCHEMA", "schema": {"type": "object", "properties": {"name": {"type": "string"}}}, "key_properties": ["name"]}' # noqa: E501 + ) def test_parse_message_schema_missing_schema(self): with self.assertRaises(Exception): - message = singer.parse_message( - '{"type": "SCHEMA", "stream": "users", "key_properties": ["name"]}') # 
nopep8 + message = singer.parse_message( # noqa: F841 + '{"type": "SCHEMA", "stream": "users", "key_properties": ["name"]}' + ) def test_parse_message_schema_missing_key_properties(self): with self.assertRaises(Exception): - message = singer.parse_message('{"type": "SCHEMA", "stream": "users", "schema": {"type": "object", "properties": {"name": {"type": "string"}}}}') # nopep8 + message = singer.parse_message( # noqa: F841 + '{"type": "SCHEMA", "stream": "users", "schema": {"type": "object", "properties": {"name": {"type": "string"}}}}' # noqa: E501 + ) def test_parse_message_state_good(self): - message = singer.parse_message( - '{"type": "STATE", "value": {"seq": 1}}') - self.assertEqual(message, singer.StateMessage(value={'seq': 1})) + message = singer.parse_message('{"type": "STATE", "value": {"seq": 1}}') + self.assertEqual(message, singer.StateMessage(value={"seq": 1})) def test_parse_message_state_missing_value(self): with self.assertRaises(Exception): @@ -92,10 +116,17 @@ def test_parse_message_state_missing_value(self): def test_parse_message_batch_good(self): message = singer.parse_message( - '{"type": "BATCH", "stream": "users", "filepath": "/tmp/users0001.jsonl", "format": "jsonl", "time_extracted": "1970-01-02T00:00:00.000Z"}') + '{"type": "BATCH", "stream": "users", "filepath": "/tmp/users0001.jsonl", "format": "jsonl", "time_extracted": "1970-01-02T00:00:00.000Z"}' # noqa: E501 + ) self.assertEqual( message, - singer.BatchMessage(stream='users', filepath='/tmp/users0001.jsonl', time_extracted=dateutil.parser.parse('1970-01-02T00:00:00.000Z')) + singer.BatchMessage( + stream="users", + filepath="/tmp/users0001.jsonl", + time_extracted=dateutil.parser.parse( + "1970-01-02T00:00:00.000Z" + ), + ), ) def test_parse_message_batch_missing_value(self): @@ -104,91 +135,101 @@ def test_parse_message_batch_missing_value(self): def test_round_trip(self): record_message = singer.RecordMessage( - record={'name': 'foo'}, - stream='users') + record={"name": "foo"}, stream="users" + ) schema_message = singer.SchemaMessage( - stream='users', - key_properties=['name'], - schema={'type': 'object', - 'properties': { - 'name': {'type': 'string'}}}) - - state_message = singer.StateMessage(value={'seq': 1}) - - self.assertEqual(record_message, - singer.parse_message(singer.format_message(record_message))) - self.assertEqual(schema_message, - singer.parse_message(singer.format_message(schema_message))) - self.assertEqual(state_message, - singer.parse_message(singer.format_message(state_message))) - - ## These three tests just confirm that writing doesn't throw + stream="users", + key_properties=["name"], + schema={ + "type": "object", + "properties": {"name": {"type": "string"}}, + }, + ) + + state_message = singer.StateMessage(value={"seq": 1}) + + self.assertEqual( + record_message, + singer.parse_message(singer.format_message(record_message)), + ) + self.assertEqual( + schema_message, + singer.parse_message(singer.format_message(schema_message)), + ) + self.assertEqual( + state_message, + singer.parse_message(singer.format_message(state_message)), + ) + + # These three tests just confirm that writing doesn't throw def test_write_record(self): - singer.write_record('users', {'name': 'mike'}) + singer.write_record("users", {"name": "mike"}) def test_write_schema(self): - schema={'type': 'object', - 'properties': { - 'name': {'type': 'string'}}} - singer.write_schema('users', schema, ['name']) + schema = {"type": "object", "properties": {"name": {"type": "string"}}} + singer.write_schema("users", 
schema, ["name"]) def test_write_state(self): - singer.write_state({'foo': 1}) + singer.write_state({"foo": 1}) def test_write_batch(self): - singer.write_batch('users', '/tmp/users0001.jsonl') + singer.write_batch("users", "/tmp/users0001.jsonl") class TestParsingNumbers(unittest.TestCase): def create_record(self, value): - raw = '{"type": "RECORD", "stream": "test", "record": {"value": ' + value + '}}' + raw = ( + '{"type": "RECORD", "stream": "test", "record": {"value": ' + + value + + "}}" + ) parsed = singer.parse_message(raw) - return parsed.record['value'] + return parsed.record["value"] def test_parse_int_zero(self): - value = self.create_record('0') + value = self.create_record("0") self.assertEqual(type(value), int) self.assertEqual(value, 0) def test_parse_regular_decimal(self): - value = self.create_record('3.14') - self.assertEqual(decimal.Decimal('3.14'), value) + value = self.create_record("3.14") + self.assertEqual(decimal.Decimal("3.14"), value) def test_parse_large_decimal(self): - value = self.create_record('9999999999999999.9999') - self.assertEqual(decimal.Decimal('9999999999999999.9999'), value) + value = self.create_record("9999999999999999.9999") + self.assertEqual(decimal.Decimal("9999999999999999.9999"), value) def test_parse_small_decimal(self): - value = self.create_record('-9999999999999999.9999') - self.assertEqual(decimal.Decimal('-9999999999999999.9999'), value) + value = self.create_record("-9999999999999999.9999") + self.assertEqual(decimal.Decimal("-9999999999999999.9999"), value) def test_parse_absurdly_large_decimal(self): - value_str = '9' * 1024 + '.' + '9' * 1024 + value_str = "9" * 1024 + "." + "9" * 1024 value = self.create_record(value_str) self.assertEqual(decimal.Decimal(value_str), value) def test_parse_absurdly_large_int(self): - value_str = '9' * 1024 + value_str = "9" * 1024 value = self.create_record(value_str) self.assertEqual(int(value_str), value) self.assertEqual(int, type(value)) def test_parse_bulk_decs(self): value_strs = [ - '-9999999999999999.9999999999999999999999', - '0', - '9999999999999999.9999999999999999999999', - '-7187498962233394.3739812942138415666763', - '9273972760690975.2044306442955715221042', - '29515565286974.1188802122612813004366', - '9176089101347578.2596296292040288441238', - '-8416853039392703.306423225471199148379', - '1285266411314091.3002668125515694162268', - '6051872750342125.3812886238958681227336', - '-1132031605459408.5571559429308939781468', - '-6387836755056303.0038029604189860431045', - '4526059300505414' + "-9999999999999999.9999999999999999999999", + "0", + "9999999999999999.9999999999999999999999", + "-7187498962233394.3739812942138415666763", + "9273972760690975.2044306442955715221042", + "29515565286974.1188802122612813004366", + "9176089101347578.2596296292040288441238", + "-8416853039392703.306423225471199148379", + "1285266411314091.3002668125515694162268", + "6051872750342125.3812886238958681227336", + "-1132031605459408.5571559429308939781468", + "-6387836755056303.0038029604189860431045", + "4526059300505414", ] for value_str in value_strs: value = self.create_record(value_str) @@ -196,18 +237,24 @@ def test_parse_bulk_decs(self): def test_format_message(self): record_message = singer.RecordMessage( - record={'name': 'foo'}, - stream='users') + record={"name": "foo"}, stream="users" + ) - self.assertEqual(b'{"type":"RECORD","stream":"users","record":{"name":"foo"}}', - singer.format_message(record_message)) + self.assertEqual( + 
b'{"type":"RECORD","stream":"users","record":{"name":"foo"}}', + singer.format_message(record_message), + ) - self.assertEqual(b'{"type":"RECORD","stream":"users","record":{"name":"foo"}}', - singer.format_message(record_message, option=0)) + self.assertEqual( + b'{"type":"RECORD","stream":"users","record":{"name":"foo"}}', + singer.format_message(record_message, option=0), + ) - self.assertEqual(b'{"type":"RECORD","stream":"users","record":{"name":"foo"}}\n', - singer.format_message(record_message, option=1)) + self.assertEqual( + b'{"type":"RECORD","stream":"users","record":{"name":"foo"}}\n', + singer.format_message(record_message, option=1), + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_statediff.py b/tests/test_statediff.py index d3dfcb2..5e0c86c 100644 --- a/tests/test_statediff.py +++ b/tests/test_statediff.py @@ -1,105 +1,78 @@ +from __future__ import annotations + import unittest + import singer.statediff as statediff -from singer.statediff import Add, Remove, Change +from singer.statediff import Add, Change, Remove + class TestPaths(unittest.TestCase): def test_simple_dict(self): self.assertEqual( - [(('a',), 1), - (('b',), 2)], - statediff.paths({'a': 1, 'b': 2})) + [(("a",), 1), (("b",), 2)], statediff.paths({"a": 1, "b": 2}) + ) def test_nested_dict(self): self.assertEqual( - [(('a', 'b'), 1), - (('a', 'c'), 2), - (('d', 'e'), 3)], - statediff.paths( - { - 'a': { - 'b': 1, - 'c': 2 - }, - 'd': { - 'e': 3 - } - } - ) + [(("a", "b"), 1), (("a", "c"), 2), (("d", "e"), 3)], + statediff.paths({"a": {"b": 1, "c": 2}, "d": {"e": 3}}), ) - def test_simple_array(self): self.assertEqual( - [((0,), 'blue'), - ((1,), 'green')], - statediff.paths( - ['blue', 'green'])) + [((0,), "blue"), ((1,), "green")], + statediff.paths(["blue", "green"]), + ) def test_nested_array(self): self.assertEqual( - [((0, 0), 'blue'), - ((0, 1), 'red'), - ((1, 0), 'green')], - statediff.paths([['blue', 'red'], ['green']])) + [((0, 0), "blue"), ((0, 1), "red"), ((1, 0), "green")], + statediff.paths([["blue", "red"], ["green"]]), + ) def test_arrays_in_dicts(self): self.assertEqual( - [(('a', 0), 'blue'), - (('a', 1), 'red'), - (('b', 0), 'green')], - statediff.paths( - { - 'a': ['blue', 'red'], - 'b': ['green'] - } - ) + [(("a", 0), "blue"), (("a", 1), "red"), (("b", 0), "green")], + statediff.paths({"a": ["blue", "red"], "b": ["green"]}), ) def test_none(self): self.assertEqual([], statediff.paths(None)) - + + class TestDiff(unittest.TestCase): def test_add(self): self.assertEqual( - [Add(('a',), 1), - Add(('b',), 2)], - statediff.diff({}, {'a': 1, 'b': 2})) + [Add(("a",), 1), Add(("b",), 2)], + statediff.diff({}, {"a": 1, "b": 2}), + ) def test_remove(self): self.assertEqual( - [Remove(('a',), 1), - Remove(('b',), 2)], - statediff.diff({'a': 1, 'b': 2}, {})) + [Remove(("a",), 1), Remove(("b",), 2)], + statediff.diff({"a": 1, "b": 2}, {}), + ) def test_change(self): self.assertEqual( - [Change(('a',), 1, 100), - Change(('b',), 2, 200)], - statediff.diff({'a': 1, 'b': 2}, - {'a': 100, 'b': 200})) - + [Change(("a",), 1, 100), Change(("b",), 2, 200)], + statediff.diff({"a": 1, "b": 2}, {"a": 100, "b": 200}), + ) + def test_null_input_for_old(self): - self.assertEqual( - [Add(('a',), 1)], - statediff.diff(None, {'a': 1})) + self.assertEqual([Add(("a",), 1)], statediff.diff(None, {"a": 1})) def test_null_input_for_new(self): - self.assertEqual( - [Remove(('a',), 1)], - statediff.diff({'a': 1}, None)) + self.assertEqual([Remove(("a",), 1)], statediff.diff({"a": 
1}, None)) def test_null_input_for_both(self): self.assertEqual([], statediff.diff(None, None)) def test_null_at_leaf(self): self.assertEqual( - [Change(('a',), 1, None), - Change(('b',), None, 2)], - statediff.diff({'a': 1, 'b': None}, - {'a': None, 'b': 2})) - - - + [Change(("a",), 1, None), Change(("b",), None, 2)], + statediff.diff({"a": 1, "b": None}, {"a": None, "b": 2}), + ) diff --git a/tests/test_transform.py b/tests/test_transform.py index 05f3ae8..ca312a7 100644 --- a/tests/test_transform.py +++ b/tests/test_transform.py @@ -1,115 +1,182 @@ +from __future__ import annotations + import unittest + from singer import transform -from singer.transform import * +from singer.transform import ( + NO_INTEGER_DATETIME_PARSING, + UNIX_MILLISECONDS_INTEGER_DATETIME_PARSING, + UNIX_SECONDS_INTEGER_DATETIME_PARSING, + Transformer, + resolve_schema_references, + unix_milliseconds_to_datetime, + unix_seconds_to_datetime, +) class TestTransform(unittest.TestCase): def test_integer_transform(self): - schema = {'type': 'integer'} + schema = {"type": "integer"} self.assertEqual(123, transform(123, schema)) - self.assertEqual(123, transform('123', schema)) - self.assertEqual(1234, transform('1,234', schema)) + self.assertEqual(123, transform("123", schema)) + self.assertEqual(1234, transform("1,234", schema)) def test_nested_transform(self): - schema = {'type': 'object', - 'properties': {'addrs': {'type': 'array', - 'items': {'type': 'object', - 'properties': {'addr1': {'type': 'string'}, - 'city': {'type': 'string'}, - 'state': {'type': 'string'}, - 'amount': {'type': 'integer'}}}}}} - data = {'addrs': [{'amount': '123'}, {'amount': '456'}]} - expected = {'addrs': [{'amount': 123}, {'amount': 456}]} + schema = { + "type": "object", + "properties": { + "addrs": { + "type": "array", + "items": { + "type": "object", + "properties": { + "addr1": {"type": "string"}, + "city": {"type": "string"}, + "state": {"type": "string"}, + "amount": {"type": "integer"}, + }, + }, + } + }, + } + data = {"addrs": [{"amount": "123"}, {"amount": "456"}]} + expected = {"addrs": [{"amount": 123}, {"amount": 456}]} self.assertDictEqual(expected, transform(data, schema)) def test_multi_type_object_transform(self): - schema = {'type': ['null', 'object', 'string'], - 'properties': {'whatever': {'type': 'date-time', - 'format': 'date-time'}}} - data = {'whatever': '2017-01-01'} - expected = {'whatever': '2017-01-01T00:00:00.000000Z'} + schema = { + "type": ["null", "object", "string"], + "properties": { + "whatever": {"type": "date-time", "format": "date-time"} + }, + } + data = {"whatever": "2017-01-01"} + expected = {"whatever": "2017-01-01T00:00:00.000000Z"} self.assertDictEqual(expected, transform(data, schema)) - data = 'justastring' - expected = 'justastring' + data = "justastring" + expected = "justastring" self.assertEqual(expected, transform(data, schema)) def test_multi_type_array_transform(self): - schema = {'type': ['null', 'array', 'integer'], - 'items': {'type': 'date-time', 'format': 'date-time'}} - data = ['2017-01-01'] - expected = ['2017-01-01T00:00:00.000000Z'] + schema = { + "type": ["null", "array", "integer"], + "items": {"type": "date-time", "format": "date-time"}, + } + data = ["2017-01-01"] + expected = ["2017-01-01T00:00:00.000000Z"] self.assertEqual(expected, transform(data, schema)) data = 23 expected = 23 self.assertEqual(expected, transform(data, schema)) def test_null_transform(self): - self.assertEqual('', transform('', {'type': ['null', 'string']})) - self.assertEqual('', transform('', 
{'type': [ 'string', 'null']})) - self.assertEqual(None, transform(None, {'type': [ 'string', 'null']})) - self.assertEqual(None, transform('', {'type': ['null']})) - self.assertEqual(None, transform(None, {'type': ['null']})) + self.assertEqual("", transform("", {"type": ["null", "string"]})) + self.assertEqual("", transform("", {"type": ["string", "null"]})) + self.assertEqual(None, transform(None, {"type": ["string", "null"]})) + self.assertEqual(None, transform("", {"type": ["null"]})) + self.assertEqual(None, transform(None, {"type": ["null"]})) def test_datetime_transform(self): - schema = {'type': 'string', 'format': 'date-time'} - string_datetime = '2017-01-01T00:00:00Z' - transformed_string_datetime = '2017-01-01T00:00:00.000000Z' - self.assertEqual(transformed_string_datetime, transform(string_datetime, schema, NO_INTEGER_DATETIME_PARSING)) - self.assertEqual('1970-01-02T00:00:00.000000Z', transform(86400, schema, UNIX_SECONDS_INTEGER_DATETIME_PARSING)) - self.assertEqual(transformed_string_datetime, transform(string_datetime, schema, UNIX_SECONDS_INTEGER_DATETIME_PARSING)) - self.assertEqual('1970-01-01T00:01:26.400000Z', transform(86400, schema, UNIX_MILLISECONDS_INTEGER_DATETIME_PARSING)) - self.assertEqual(transformed_string_datetime, transform(string_datetime, schema, UNIX_MILLISECONDS_INTEGER_DATETIME_PARSING)) + schema = {"type": "string", "format": "date-time"} + string_datetime = "2017-01-01T00:00:00Z" + transformed_string_datetime = "2017-01-01T00:00:00.000000Z" + self.assertEqual( + transformed_string_datetime, + transform(string_datetime, schema, NO_INTEGER_DATETIME_PARSING), + ) + self.assertEqual( + "1970-01-02T00:00:00.000000Z", + transform(86400, schema, UNIX_SECONDS_INTEGER_DATETIME_PARSING), + ) + self.assertEqual( + transformed_string_datetime, + transform( + string_datetime, schema, UNIX_SECONDS_INTEGER_DATETIME_PARSING + ), + ) + self.assertEqual( + "1970-01-01T00:01:26.400000Z", + transform( + 86400, schema, UNIX_MILLISECONDS_INTEGER_DATETIME_PARSING + ), + ) + self.assertEqual( + transformed_string_datetime, + transform( + string_datetime, + schema, + UNIX_MILLISECONDS_INTEGER_DATETIME_PARSING, + ), + ) trans = Transformer(NO_INTEGER_DATETIME_PARSING) - self.assertIsNone(trans._transform_datetime('cat')) + self.assertIsNone(trans._transform_datetime("cat")) self.assertIsNone(trans._transform_datetime(0)) trans.integer_datetime_fmt = UNIX_SECONDS_INTEGER_DATETIME_PARSING - self.assertIsNone(trans._transform_datetime('cat')) + self.assertIsNone(trans._transform_datetime("cat")) def test_datetime_string_with_timezone(self): - schema = {'type': 'string', 'format': 'date-time'} - string_datetime = '2017-03-18T07:00:05-0700' - transformed_string_datetime = '2017-03-18T14:00:05.000000Z' - self.assertEqual(transformed_string_datetime, transform(string_datetime, schema)) + schema = {"type": "string", "format": "date-time"} + string_datetime = "2017-03-18T07:00:05-0700" + transformed_string_datetime = "2017-03-18T14:00:05.000000Z" + self.assertEqual( + transformed_string_datetime, transform(string_datetime, schema) + ) def test_datetime_fractional_seconds_transform(self): - schema = {'type': 'string', 'format': 'date-time'} - string_datetime = '2017-01-01T00:00:00.123000Z' - self.assertEqual(string_datetime, transform(string_datetime, schema, NO_INTEGER_DATETIME_PARSING)) + schema = {"type": "string", "format": "date-time"} + string_datetime = "2017-01-01T00:00:00.123000Z" + self.assertEqual( + string_datetime, + transform(string_datetime, schema, 
NO_INTEGER_DATETIME_PARSING), + ) def test_anyof_datetime(self): - schema = {'anyOf': [{'type': 'null'}, {'format': 'date-time', 'type': 'string'}]} - string_datetime = '2016-03-10T18:47:20Z' - transformed_string_datetime = '2016-03-10T18:47:20.000000Z' - self.assertEqual(transformed_string_datetime, transform(string_datetime, schema)) + schema = { + "anyOf": [ + {"type": "null"}, + {"format": "date-time", "type": "string"}, + ] + } + string_datetime = "2016-03-10T18:47:20Z" + transformed_string_datetime = "2016-03-10T18:47:20.000000Z" + self.assertEqual( + transformed_string_datetime, transform(string_datetime, schema) + ) self.assertIsNone(transform(None, schema)) def test_error_path(self): - schema = {'type': 'object', - 'properties': {'foo': {'type': 'integer'}, - 'baz': {'type': 'integer'}}} - data = {'foo': 'bar', 'baz': 1} + schema = { + "type": "object", + "properties": { + "foo": {"type": "integer"}, + "baz": {"type": "integer"}, + }, + } + data = {"foo": "bar", "baz": 1} trans = Transformer(NO_INTEGER_DATETIME_PARSING) success, data = trans.transform_recur(data, schema, []) self.assertFalse(success) self.assertIsNone(data) - self.assertListEqual([[], ['foo']], sorted(e.path for e in trans.errors)) + self.assertListEqual( + [[], ["foo"]], sorted(e.path for e in trans.errors) + ) def test_nested_error_path_throws(self): schema = { - 'type': 'object', - 'properties': { - 'key1': { - 'type': 'object', - 'properties': { - 'key2': { - 'type': 'object', - 'properties': { - 'key3': { - 'type': 'object', - 'properties': { - 'key4': {'type': 'integer'}, + "type": "object", + "properties": { + "key1": { + "type": "object", + "properties": { + "key2": { + "type": "object", + "properties": { + "key3": { + "type": "object", + "properties": { + "key4": {"type": "integer"}, }, }, }, @@ -118,34 +185,34 @@ def test_nested_error_path_throws(self): }, }, } - data = {'key1': {'key2': {'key3': {'key4': 'not an integer'}}}} + data = {"key1": {"key2": {"key3": {"key4": "not an integer"}}}} trans = Transformer() success, _ = trans.transform_recur(data, schema, []) self.assertFalse(success) expected = [ [], - ['key1'], - ['key1', 'key2'], - ['key1', 'key2', 'key3'], - ['key1', 'key2', 'key3', 'key4'], + ["key1"], + ["key1", "key2"], + ["key1", "key2", "key3"], + ["key1", "key2", "key3", "key4"], ] self.assertListEqual(expected, sorted(e.path for e in trans.errors)) def test_nested_error_path_no_throw(self): schema = { - 'type': 'object', - 'properties': { - 'key1': { - 'type': 'object', - 'properties': { - 'key2': { - 'type': 'object', - 'properties': { - 'key3': { - 'type': 'object', - 'properties': { - 'key4': {'type': 'string'}, - 'key5': {'type': 'string'}, + "type": "object", + "properties": { + "key1": { + "type": "object", + "properties": { + "key2": { + "type": "object", + "properties": { + "key3": { + "type": "object", + "properties": { + "key4": {"type": "string"}, + "key5": {"type": "string"}, }, }, }, @@ -154,225 +221,374 @@ def test_nested_error_path_no_throw(self): }, }, } - data = {'key1': {'key2': {'key3': {'key4': None, 'key5': None}}}} + data = {"key1": {"key2": {"key3": {"key4": None, "key5": None}}}} trans = Transformer() success, data = trans.transform_recur(data, schema, []) self.assertFalse(success) self.assertIsNone(data) expected = [ [], - ['key1'], - ['key1', 'key2'], - ['key1', 'key2', 'key3'], - ['key1', 'key2', 'key3', 'key4'], - ['key1', 'key2', 'key3', 'key5'], + ["key1"], + ["key1", "key2"], + ["key1", "key2", "key3"], + ["key1", "key2", "key3", "key4"], + ["key1", "key2", 
"key3", "key5"], ] self.assertListEqual(expected, sorted(e.path for e in trans.errors)) def test_error_path_array(self): - schema = {'type': 'object', - 'properties': {'integers': {'type': 'array', - 'items': {'type': 'integer'}}}} - data = {'integers': [1, 2, 'not an integer', 4, 'also not an integer']} + schema = { + "type": "object", + "properties": { + "integers": {"type": "array", "items": {"type": "integer"}} + }, + } + data = {"integers": [1, 2, "not an integer", 4, "also not an integer"]} trans = Transformer() success, data = trans.transform_recur(data, schema, []) self.assertFalse(success) expected = [ [], - ['integers'], - ['integers', 2], - ['integers', 4], + ["integers"], + ["integers", 2], + ["integers", 4], ] self.assertListEqual(expected, sorted(e.path for e in trans.errors)) def test_nested_error_path_array(self): - schema = {'type': 'object', - 'properties': {'lists_of_integers': {'type': 'array', - 'items': {'type': 'array', - 'items': {'type': 'integer'}}}}} - data = {'lists_of_integers': [[1, 'not an integer'], [2, 3], ['also not an integer', 4]]} + schema = { + "type": "object", + "properties": { + "lists_of_integers": { + "type": "array", + "items": {"type": "array", "items": {"type": "integer"}}, + } + }, + } + data = { + "lists_of_integers": [ + [1, "not an integer"], + [2, 3], + ["also not an integer", 4], + ] + } trans = Transformer() success, transformed_data = trans.transform_recur(data, schema, []) self.assertFalse(success) expected = [ [], - ['lists_of_integers'], - ['lists_of_integers', 0], - ['lists_of_integers', 0, 1], - ['lists_of_integers', 2], - ['lists_of_integers', 2, 0], + ["lists_of_integers"], + ["lists_of_integers", 0], + ["lists_of_integers", 0, 1], + ["lists_of_integers", 2], + ["lists_of_integers", 2, 0], ] self.assertListEqual(expected, sorted(e.path for e in trans.errors)) def test_error_path_datetime(self): - schema = {'type': 'object', - 'properties': {'good_datetime': {'type': 'string', 'format': 'date-time'}, - 'bad_datetime1': {'type': 'string', 'format': 'date-time'}, - 'bad_datetime2': {'type': 'string', 'format': 'date-time'}}} - data = {'good_datetime': '2017-04-11T16:07:00Z', - 'bad_datetime1': 'not a datetime', - 'bad_datetime2': 1} + schema = { + "type": "object", + "properties": { + "good_datetime": {"type": "string", "format": "date-time"}, + "bad_datetime1": {"type": "string", "format": "date-time"}, + "bad_datetime2": {"type": "string", "format": "date-time"}, + }, + } + data = { + "good_datetime": "2017-04-11T16:07:00Z", + "bad_datetime1": "not a datetime", + "bad_datetime2": 1, + } trans = Transformer() success, transformed_data = trans.transform_recur(data, schema, []) self.assertFalse(success) expected = [ [], - ['bad_datetime1'], - ['bad_datetime2'], + ["bad_datetime1"], + ["bad_datetime2"], ] self.assertListEqual(expected, sorted(e.path for e in trans.errors)) def test_unexpected_object_properties(self): - schema = {'type': 'object', - 'properties': {'good_property': {'type': 'string'}}} - data = {'good_property': 'expected data', - 'bad_property': 'unexpected data'} + schema = { + "type": "object", + "properties": {"good_property": {"type": "string"}}, + } + data = { + "good_property": "expected data", + "bad_property": "unexpected data", + } trans = Transformer() success, transformed_data = trans.transform_recur(data, schema, []) self.assertTrue(success) - self.assertDictEqual({'good_property': 'expected data'}, transformed_data) - self.assertSetEqual(set(['bad_property']), trans.removed) + self.assertDictEqual( + 
{"good_property": "expected data"}, transformed_data + ) + self.assertSetEqual(set(["bad_property"]), trans.removed) self.assertListEqual([], trans.errors) def test_unix_seconds_to_datetime(self): - self.assertEqual(unix_seconds_to_datetime(0), '1970-01-01T00:00:00.000000Z') - self.assertEqual(unix_seconds_to_datetime(1502722441), '2017-08-14T14:54:01.000000Z') - - def test_unix_seconds_to_datetime(self): - self.assertEqual(unix_milliseconds_to_datetime(0), '1970-01-01T00:00:00.000000Z') - self.assertEqual(unix_milliseconds_to_datetime(1502722441000), '2017-08-14T14:54:01.000000Z') - + self.assertEqual( + unix_seconds_to_datetime(0), "1970-01-01T00:00:00.000000Z" + ) + self.assertEqual( + unix_seconds_to_datetime(1502722441), "2017-08-14T14:54:01.000000Z" + ) + + def test_unix_seconds_to_datetime2(self): + self.assertEqual( + unix_milliseconds_to_datetime(0), "1970-01-01T00:00:00.000000Z" + ) + self.assertEqual( + unix_milliseconds_to_datetime(1502722441000), + "2017-08-14T14:54:01.000000Z", + ) def test_null_object_transform(self): - schema = {'type': 'object', - 'properties': {'addrs': {'type': ['null', 'object'], - 'properties': {'city': {'type': 'string'}}}}} - none_data = {'addrs': None} + schema = { + "type": "object", + "properties": { + "addrs": { + "type": ["null", "object"], + "properties": {"city": {"type": "string"}}, + } + }, + } + none_data = {"addrs": None} self.assertDictEqual(none_data, transform(none_data, schema)) - empty_data = {'addrs': {}} + empty_data = {"addrs": {}} self.assertDictEqual(empty_data, transform(empty_data, schema)) + class TestTransformsWithMetadata(unittest.TestCase): def test_drops_no_data_when_not_dict(self): - schema = {'type': 'string'} + schema = {"type": "string"} metadata = {} - string_value = 'hello' - self.assertEqual(string_value, transform(string_value, schema, NO_INTEGER_DATETIME_PARSING, metadata=metadata)) + string_value = "hello" + self.assertEqual( + string_value, + transform( + string_value, + schema, + NO_INTEGER_DATETIME_PARSING, + metadata=metadata, + ), + ) def test_keeps_selected_data_from_dicts(self): - schema = {'type': 'object', - 'properties': { 'name': {'type': 'string'}}} - metadata = {('properties','name'): {'selected': True}} - dict_value = {'name': 'chicken'} - self.assertEqual({'name': 'chicken'}, transform(dict_value, schema, NO_INTEGER_DATETIME_PARSING, metadata=metadata)) + schema = {"type": "object", "properties": {"name": {"type": "string"}}} + metadata = {("properties", "name"): {"selected": True}} + dict_value = {"name": "chicken"} + self.assertEqual( + {"name": "chicken"}, + transform( + dict_value, + schema, + NO_INTEGER_DATETIME_PARSING, + metadata=metadata, + ), + ) def test_keeps_automatic_data_from_dicts(self): - schema = {'type': 'object', - 'properties': { 'name': {'type': 'string'}}} - metadata = {('properties','name'): {'inclusion': 'automatic'}} - dict_value = {'name': 'chicken'} - self.assertEqual({'name': 'chicken'}, transform(dict_value, schema, NO_INTEGER_DATETIME_PARSING, metadata=metadata)) + schema = {"type": "object", "properties": {"name": {"type": "string"}}} + metadata = {("properties", "name"): {"inclusion": "automatic"}} + dict_value = {"name": "chicken"} + self.assertEqual( + {"name": "chicken"}, + transform( + dict_value, + schema, + NO_INTEGER_DATETIME_PARSING, + metadata=metadata, + ), + ) def test_keeps_fields_without_metadata(self): - schema = {'type': 'object', - 'properties': { 'name': {'type': 'string'}}} - metadata = {('properties','age'): {'inclusion': 'automatic'}} - dict_value 
= {'name': 'chicken'} - self.assertEqual({'name': 'chicken'}, transform(dict_value, schema, NO_INTEGER_DATETIME_PARSING, metadata=metadata)) + schema = {"type": "object", "properties": {"name": {"type": "string"}}} + metadata = {("properties", "age"): {"inclusion": "automatic"}} + dict_value = {"name": "chicken"} + self.assertEqual( + {"name": "chicken"}, + transform( + dict_value, + schema, + NO_INTEGER_DATETIME_PARSING, + metadata=metadata, + ), + ) def test_drops_fields_which_are_unselected(self): - schema = {'type': 'object', - 'properties': { 'name': {'type': 'string'}}} - metadata = {('properties','name'): {'selected': False}} - dict_value = {'name': 'chicken'} - self.assertEqual({}, transform(dict_value, schema, NO_INTEGER_DATETIME_PARSING, metadata=metadata)) + schema = {"type": "object", "properties": {"name": {"type": "string"}}} + metadata = {("properties", "name"): {"selected": False}} + dict_value = {"name": "chicken"} + self.assertEqual( + {}, + transform( + dict_value, + schema, + NO_INTEGER_DATETIME_PARSING, + metadata=metadata, + ), + ) def test_drops_fields_which_are_unsupported(self): - schema = {'type': 'object', - 'properties': { 'name': {'type': 'string'}}} - metadata = {('properties','name'): {'inclusion': 'unsupported'}} - dict_value = {'name': 'chicken'} - self.assertEqual({}, transform(dict_value, schema, NO_INTEGER_DATETIME_PARSING, metadata=metadata)) + schema = {"type": "object", "properties": {"name": {"type": "string"}}} + metadata = {("properties", "name"): {"inclusion": "unsupported"}} + dict_value = {"name": "chicken"} + self.assertEqual( + {}, + transform( + dict_value, + schema, + NO_INTEGER_DATETIME_PARSING, + metadata=metadata, + ), + ) + class TestResolveSchemaReferences(unittest.TestCase): def test_internal_refs_resolve(self): - schema = {'type': 'object', - 'definitions': { 'string_type': {'type': 'string'}}, - 'properties': { 'name': {'$ref': '#/definitions/string_type'}}} + schema = { + "type": "object", + "definitions": {"string_type": {"type": "string"}}, + "properties": {"name": {"$ref": "#/definitions/string_type"}}, + } result = resolve_schema_references(schema) - self.assertEqual(result['properties']['name']['type'], 'string') + self.assertEqual(result["properties"]["name"]["type"], "string") def test_external_refs_resolve(self): - schema = {'type': 'object', - 'properties': { 'name': {'$ref': 'references.json#/definitions/string_type'}}} - refs = {'references.json': {'definitions': { 'string_type': {'type': 'string'}}}} + schema = { + "type": "object", + "properties": { + "name": {"$ref": "references.json#/definitions/string_type"} + }, + } + refs = { + "references.json": { + "definitions": {"string_type": {"type": "string"}} + } + } result = resolve_schema_references(schema, refs) - self.assertEqual(result['properties']['name']['type'], 'string') + self.assertEqual(result["properties"]["name"]["type"], "string") def test_refs_resolve_pattern_properties(self): - schema = {'type': 'object', - 'definitions': { 'string_type': {'type': 'string'}}, - 'patternProperties': {'.+': {'$ref': '#/definitions/string_type'}}} + schema = { + "type": "object", + "definitions": {"string_type": {"type": "string"}}, + "patternProperties": {".+": {"$ref": "#/definitions/string_type"}}, + } result = resolve_schema_references(schema) - self.assertEqual(result['patternProperties']['.+']['type'], 'string') + self.assertEqual(result["patternProperties"][".+"]["type"], "string") def test_refs_resolve_items(self): - schema = {'type': 'object', - 'properties': { 
'dogs': - {'type': 'array', - 'items': {'$ref': 'doggie.json#/dogs'}}}} - refs = {'doggie.json': {'dogs': { - 'type': 'object', - 'properties': { - 'breed': { - 'type': 'string' - }, - 'name': { - 'type': 'string'}}}}} + schema = { + "type": "object", + "properties": { + "dogs": { + "type": "array", + "items": {"$ref": "doggie.json#/dogs"}, + } + }, + } + refs = { + "doggie.json": { + "dogs": { + "type": "object", + "properties": { + "breed": {"type": "string"}, + "name": {"type": "string"}, + }, + } + } + } result = resolve_schema_references(schema, refs) - self.assertEqual(result['properties']['dogs']['items']['properties']['breed'], {'type': 'string'}) + self.assertEqual( + result["properties"]["dogs"]["items"]["properties"]["breed"], + {"type": "string"}, + ) def test_refs_resolve_nested(self): - schema = {'type': 'object', - 'properties': { - 'thing': { - 'type': 'object', - 'properties': { - 'name': {'$ref': 'references.json#/definitions/string_type'}}}}} - refs = {'references.json': {'definitions': { 'string_type': {'type': 'string'}}}} + schema = { + "type": "object", + "properties": { + "thing": { + "type": "object", + "properties": { + "name": { + "$ref": "references.json#/definitions/string_type" + } + }, + } + }, + } + refs = { + "references.json": { + "definitions": {"string_type": {"type": "string"}} + } + } result = resolve_schema_references(schema, refs) - self.assertEqual(result['properties']['thing']['properties']['name']['type'], 'string') + self.assertEqual( + result["properties"]["thing"]["properties"]["name"]["type"], + "string", + ) def test_indirect_reference(self): - schema = {'type': 'object', - 'properties': { 'name': {'$ref': 'references.json#/definitions/string_type'}}} - refs = {'references.json': {'definitions': { 'string_type': {'$ref': 'second_reference.json'}}}, - 'second_reference.json': {'type': 'string'}} + schema = { + "type": "object", + "properties": { + "name": {"$ref": "references.json#/definitions/string_type"} + }, + } + refs = { + "references.json": { + "definitions": { + "string_type": {"$ref": "second_reference.json"} + } + }, + "second_reference.json": {"type": "string"}, + } result = resolve_schema_references(schema, refs) - self.assertEqual(result['properties']['name']['type'], 'string') + self.assertEqual(result["properties"]["name"]["type"], "string") def test_refs_resolve_preserves_existing_fields(self): - schema = {'type': 'object', - 'properties': { 'name': {'$ref': 'references.json#/definitions/string_type', - 'still_here': 'yep'}}} - refs = {'references.json': {'definitions': { 'string_type': {'type': 'string'}}}} + schema = { + "type": "object", + "properties": { + "name": { + "$ref": "references.json#/definitions/string_type", + "still_here": "yep", + } + }, + } + refs = { + "references.json": { + "definitions": {"string_type": {"type": "string"}} + } + } result = resolve_schema_references(schema, refs) - self.assertEqual(result['properties']['name']['type'], 'string') - self.assertEqual(result['properties']['name']['still_here'], 'yep') + self.assertEqual(result["properties"]["name"]["type"], "string") + self.assertEqual(result["properties"]["name"]["still_here"], "yep") + class TestPatternProperties(unittest.TestCase): def test_pattern_properties_match(self): - schema = {'type': 'object', - 'patternProperties': { '.+': {'type': 'string'}}} - dict_value = {'name': 'chicken', 'unit_cost': '1.45', 'SKU': '123456'} + schema = { + "type": "object", + "patternProperties": {".+": {"type": "string"}}, + } + dict_value = {"name": 
"chicken", "unit_cost": "1.45", "SKU": "123456"} expected = dict(dict_value) self.assertEqual(expected, transform(dict_value, schema)) def test_pattern_properties_match_multiple(self): - schema = {'type': 'object', - 'patternProperties': { '.+?cost': {'type': 'number'}, - '.+(? Date: Thu, 8 Aug 2024 15:21:01 +1200 Subject: [PATCH 16/31] Feature/supported python versions (#34) * Updating constraints for python versions * Updating Lockfile with Python dependencies --------- Co-authored-by: Steve Clarke --- CHANGELOG.md | 3 +++ poetry.lock | 4 ++-- pyproject.toml | 4 ++-- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6df03f0..b43ddb0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 4.0.1 (2024-08-08) + * Correcting Supported Python version constraint in pyproject.toml + ## 4.0.0 (2024-07-08) * Moving from setup.py to pyproject.toml and poetry for installation * Introducing tox for platform independent testing of pep8 compliance. diff --git a/poetry.lock b/poetry.lock index a389ac9..395e669 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1315,5 +1315,5 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" -python-versions = "^3.8" -content-hash = "327bb1f967c37221f4326095ee6323715036ecda14e68d3cccb39833f1eee81c" +python-versions = ">=3.8,<3.13" +content-hash = "0f5ed68d8e3e78cd5f8001a4ae1648a908c21d2b23cb56681484d1f08cb86086" diff --git a/pyproject.toml b/pyproject.toml index f81c256..c0f3985 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "pipelinewise-singer-python" -version = "4.0.0" +version = "4.0.1" description = "Singer.io utility library - PipelineWise compatible" authors = [] license = "Apache 2.0" @@ -27,7 +27,7 @@ include = [ [tool.poetry.dependencies] -python = "^3.8" +python = ">=3.8,<3.13" pytz = "^2018.4" jsonschema = "^4.19.2" msgspec = "^0.18.0" From be48d56ceba1fa6ec6bd7998e79858b6a82c27f9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 15:22:33 +1200 Subject: [PATCH 17/31] Bump coverage from 7.5.4 to 7.6.1 (#33) Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.5.4 to 7.6.1. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.5.4...7.6.1) --- updated-dependencies: - dependency-name: coverage dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 130 ++++++++++++++++++++++++++++++---------------------- 1 file changed, 75 insertions(+), 55 deletions(-) diff --git a/poetry.lock b/poetry.lock index 395e669..04ef2ae 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "appnope" @@ -242,63 +242,83 @@ files = [ [[package]] name = "coverage" -version = "7.5.4" +version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cfb5a4f556bb51aba274588200a46e4dd6b505fb1a5f8c5ae408222eb416f99"}, - {file = "coverage-7.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2174e7c23e0a454ffe12267a10732c273243b4f2d50d07544a91198f05c48f47"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2214ee920787d85db1b6a0bd9da5f8503ccc8fcd5814d90796c2f2493a2f4d2e"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1137f46adb28e3813dec8c01fefadcb8c614f33576f672962e323b5128d9a68d"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b385d49609f8e9efc885790a5a0e89f2e3ae042cdf12958b6034cc442de428d3"}, - {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4a474f799456e0eb46d78ab07303286a84a3140e9700b9e154cfebc8f527016"}, - {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5cd64adedf3be66f8ccee418473c2916492d53cbafbfcff851cbec5a8454b136"}, - {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e564c2cf45d2f44a9da56f4e3a26b2236504a496eb4cb0ca7221cd4cc7a9aca9"}, - {file = "coverage-7.5.4-cp310-cp310-win32.whl", hash = "sha256:7076b4b3a5f6d2b5d7f1185fde25b1e54eb66e647a1dfef0e2c2bfaf9b4c88c8"}, - {file = "coverage-7.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:018a12985185038a5b2bcafab04ab833a9a0f2c59995b3cec07e10074c78635f"}, - {file = "coverage-7.5.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db14f552ac38f10758ad14dd7b983dbab424e731588d300c7db25b6f89e335b5"}, - {file = "coverage-7.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3257fdd8e574805f27bb5342b77bc65578e98cbc004a92232106344053f319ba"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a6612c99081d8d6134005b1354191e103ec9705d7ba2754e848211ac8cacc6b"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d45d3cbd94159c468b9b8c5a556e3f6b81a8d1af2a92b77320e887c3e7a5d080"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed550e7442f278af76d9d65af48069f1fb84c9f745ae249c1a183c1e9d1b025c"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a892be37ca35eb5019ec85402c3371b0f7cda5ab5056023a7f13da0961e60da"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8192794d120167e2a64721d88dbd688584675e86e15d0569599257566dec9bf0"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:820bc841faa502e727a48311948e0461132a9c8baa42f6b2b84a29ced24cc078"}, - {file = "coverage-7.5.4-cp311-cp311-win32.whl", hash = "sha256:6aae5cce399a0f065da65c7bb1e8abd5c7a3043da9dceb429ebe1b289bc07806"}, - {file = "coverage-7.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2e344d6adc8ef81c5a233d3a57b3c7d5181f40e79e05e1c143da143ccb6377d"}, - {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"}, - 
{file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"}, - {file = "coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"}, - {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"}, - {file = "coverage-7.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdd31315fc20868c194130de9ee6bfd99755cc9565edff98ecc12585b90be882"}, - {file = "coverage-7.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02ff6e898197cc1e9fa375581382b72498eb2e6d5fc0b53f03e496cfee3fac6d"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05c16cf4b4c2fc880cb12ba4c9b526e9e5d5bb1d81313d4d732a5b9fe2b9d53"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5986ee7ea0795a4095ac4d113cbb3448601efca7f158ec7f7087a6c705304e4"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df54843b88901fdc2f598ac06737f03d71168fd1175728054c8f5a2739ac3e4"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ab73b35e8d109bffbda9a3e91c64e29fe26e03e49addf5b43d85fc426dde11f9"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:aea072a941b033813f5e4814541fc265a5c12ed9720daef11ca516aeacd3bd7f"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:16852febd96acd953b0d55fc842ce2dac1710f26729b31c80b940b9afcd9896f"}, - {file = "coverage-7.5.4-cp38-cp38-win32.whl", hash = "sha256:8f894208794b164e6bd4bba61fc98bf6b06be4d390cf2daacfa6eca0a6d2bb4f"}, - {file = "coverage-7.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:e2afe743289273209c992075a5a4913e8d007d569a406ffed0bd080ea02b0633"}, - {file = "coverage-7.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b95c3a8cb0463ba9f77383d0fa8c9194cf91f64445a63fc26fb2327e1e1eb088"}, - {file = "coverage-7.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7564cc09dd91b5a6001754a5b3c6ecc4aba6323baf33a12bd751036c998be4"}, - {file = "coverage-7.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44da56a2589b684813f86d07597fdf8a9c6ce77f58976727329272f5a01f99f7"}, - {file = 
"coverage-7.5.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e16f3d6b491c48c5ae726308e6ab1e18ee830b4cdd6913f2d7f77354b33f91c8"}, - {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbc5958cb471e5a5af41b0ddaea96a37e74ed289535e8deca404811f6cb0bc3d"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a04e990a2a41740b02d6182b498ee9796cf60eefe40cf859b016650147908029"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ddbd2f9713a79e8e7242d7c51f1929611e991d855f414ca9996c20e44a895f7c"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b1ccf5e728ccf83acd313c89f07c22d70d6c375a9c6f339233dcf792094bcbf7"}, - {file = "coverage-7.5.4-cp39-cp39-win32.whl", hash = "sha256:56b4eafa21c6c175b3ede004ca12c653a88b6f922494b023aeb1e836df953ace"}, - {file = "coverage-7.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:65e528e2e921ba8fd67d9055e6b9f9e34b21ebd6768ae1c1723f4ea6ace1234d"}, - {file = "coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"}, - {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = 
"coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.extras] @@ -903,8 +923,8 @@ astroid = ">=3.2.2,<=3.3.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" From 93fca9dcaa59bca8b7adef84df3705acb1cd4cf1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 15:24:21 +1200 Subject: [PATCH 18/31] Bump tox from 4.15.1 to 4.17.1 (#35) Bumps [tox](https://github.com/tox-dev/tox) from 4.15.1 to 4.17.1. - [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/4.15.1...4.17.1) --- updated-dependencies: - dependency-name: tox dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 32 ++++++++++++++++---------------- pyproject.toml | 2 +- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/poetry.lock b/poetry.lock index 04ef2ae..08eb6bf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -132,13 +132,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "cachetools" -version = "5.3.3" +version = "5.4.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, + {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, ] [[package]] @@ -1191,30 +1191,30 @@ files = [ [[package]] name = "tox" -version = "4.15.1" +version = "4.17.1" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.15.1-py3-none-any.whl", hash = "sha256:f00a5dc4222b358e69694e47e3da0227ac41253509bca9f45aa8f012053e8d9d"}, - {file = "tox-4.15.1.tar.gz", hash = "sha256:53a092527d65e873e39213ebd4bd027a64623320b6b0326136384213f95b7076"}, + {file = "tox-4.17.1-py3-none-any.whl", hash = "sha256:2974597c0353577126ab014f52d1a399fb761049e165ff34427f84e8cfe6c990"}, + {file = "tox-4.17.1.tar.gz", hash = "sha256:2c41565a571e34480bd401d668a4899806169a4633e972ac296c54406d2ded8a"}, ] [package.dependencies] -cachetools = ">=5.3.2" +cachetools = ">=5.4" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.13.1" -packaging = ">=23.2" -platformdirs = ">=4.1" -pluggy = ">=1.3" -pyproject-api = ">=1.6.1" +filelock = ">=3.15.4" +packaging = ">=24.1" +platformdirs = ">=4.2.2" +pluggy = ">=1.5" +pyproject-api = ">=1.7.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -virtualenv = ">=20.25" +virtualenv = ">=20.26.3" [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.25.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] -testing = ["build[virtualenv] (>=1.0.3)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=8.0.2)", "distlib (>=0.3.8)", "flaky (>=3.7)", "hatch-vcs (>=0.4)", "hatchling (>=1.21)", "psutil (>=5.9.7)", "pytest (>=7.4.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-xdist (>=3.5)", "re-assert (>=1.1)", "time-machine (>=2.13)", "wheel (>=0.42)"] +docs = ["furo (>=2024.7.18)", "sphinx (>=7.4.7)", "sphinx-argparse-cli (>=1.16)", "sphinx-autodoc-typehints (>=2.2.3)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] +testing = ["build[virtualenv] (>=1.2.1)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", 
"setuptools (>=70.3)", "time-machine (>=2.14.2)", "wheel (>=0.43)"] [[package]] name = "traitlets" @@ -1336,4 +1336,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.13" -content-hash = "0f5ed68d8e3e78cd5f8001a4ae1648a908c21d2b23cb56681484d1f08cb86086" +content-hash = "6cda3112d2caf624a184fcef7523e796700a63a8b311eb1b3b758e9d3d0152d3" diff --git a/pyproject.toml b/pyproject.toml index c0f3985..32783e2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,7 +44,7 @@ coverage = ">= 6.3, < 8.0" ipython = "^8.12.1" ipdb = "^0.13.13" unify = "^0.5" -tox = "^4.15.1" +tox = "^4.17.1" flake8 = { version = "^7.1.0", python = ">=3.8.1" } black = "^24.4.2" pydocstyle = "^6.3.0" From d3382d8f1aca266e2dbb96f1f97bf82c89e789f2 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Thu, 8 Aug 2024 15:59:15 +1200 Subject: [PATCH 19/31] Bumping dependencies (#36) Co-authored-by: Steve Clarke --- CHANGELOG.md | 7 ++++++- poetry.lock | 14 +++++++------- pyproject.toml | 4 ++-- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b43ddb0..f131dd6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,12 @@ # Changelog ## 4.0.1 (2024-08-08) - * Correcting Supported Python version constraint in pyproject.toml + * Correcting Supported Python version constraint in pyproject.toml >=3.8,<3.13 + * Bump pytest from 7.4.4 to 8.3.2 + * Bump pytz from 2018.9 to 2024.1 + * Bump jsonschema from 4.22.0 to 4.23.0 + * Bump tox from 4.15.1 to 4.17.1 #35 + * Bump coverage from 7.5.4 to 7.6.1 #33 ## 4.0.0 (2024-07-08) * Moving from setup.py to pyproject.toml and poetry for installation diff --git a/poetry.lock b/poetry.lock index 08eb6bf..fbc6f4e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
[[package]] name = "appnope" @@ -540,13 +540,13 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jsonschema" -version = "4.22.0" +version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"}, - {file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"}, + {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, + {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, ] [package.dependencies] @@ -559,7 +559,7 @@ rpds-py = ">=0.7.1" [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] [[package]] name = "jsonschema-specifications" @@ -923,8 +923,8 @@ astroid = ">=3.2.2,<=3.3.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" @@ -1336,4 +1336,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.13" -content-hash = "6cda3112d2caf624a184fcef7523e796700a63a8b311eb1b3b758e9d3d0152d3" +content-hash = "1f6283862d427a24fd9085efffbb9f49250376dcd81815cfe1d87b4b62491e51" diff --git a/pyproject.toml b/pyproject.toml index 32783e2..32998dd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,8 +28,8 @@ include = [ [tool.poetry.dependencies] python = ">=3.8,<3.13" -pytz = "^2018.4" -jsonschema = "^4.19.2" +pytz = ">=2018.4,<2025.0" +jsonschema = "^4.23.0" msgspec = "^0.18.0" python-dateutil = "^2.7.3" backoff = "2.2.1" From 396bf68c789709cf07a15f8f3159e68bb2526c7f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 16:17:38 +1200 Subject: [PATCH 20/31] Bump pytest from 7.4.4 to 8.3.2 (#31) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.4.4 to 8.3.2. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.4.4...8.3.2) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 20 ++++++++++---------- pyproject.toml | 2 +- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/poetry.lock b/poetry.lock index fbc6f4e..2c35edb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "appnope" @@ -923,8 +923,8 @@ astroid = ">=3.2.2,<=3.3.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" @@ -958,13 +958,13 @@ testing = ["covdefaults (>=2.3)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytes [[package]] name = "pytest" -version = "7.4.4" +version = "8.3.2" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, + {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, + {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, ] [package.dependencies] @@ -972,11 +972,11 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "python-dateutil" @@ -1336,4 +1336,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.13" -content-hash = "1f6283862d427a24fd9085efffbb9f49250376dcd81815cfe1d87b4b62491e51" +content-hash = "1d6d019d01bce7e0c14577e383182e6f1c693b6e7b1acafcfc7d6f577da42c7e" diff --git a/pyproject.toml b/pyproject.toml index 32998dd..53190dd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ ciso8601 = "^2.3.1" types-pytz = "^2024.1.0.20240417" types-python-dateutil = "^2.9.0.20240316" pylint = "3.2.5" -pytest = "7.*, <9.*" +pytest = "8.*" coverage = ">= 6.3, < 8.0" ipython = "^8.12.1" ipdb = "^0.13.13" From 155d5a96df0684b7a4a2407cc329490d5928a36f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 16:19:48 +1200 Subject: [PATCH 21/31] Bump pytz from 2018.9 to 2024.1 (#28) Bumps [pytz](https://github.com/stub42/pytz) 
from 2018.9 to 2024.1. - [Release notes](https://github.com/stub42/pytz/releases) - [Commits](https://github.com/stub42/pytz/compare/release_2018.9...release_2024.1) --- updated-dependencies: - dependency-name: pytz dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2c35edb..47eaac8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -994,13 +994,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2018.9" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2018.9-py2.py3-none-any.whl", hash = "sha256:32b0891edff07e28efe91284ed9c31e123d84bea3fd98e1f72be2508f43ef8d9"}, - {file = "pytz-2018.9.tar.gz", hash = "sha256:d5f05e487007e29e03409f9398d074e158d920d36eb82eaf66fb1136b0c5374c"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] From bfff96d7f1809e090d7ad6a42d69cb1c663840ff Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Fri, 9 Aug 2024 14:55:32 +1200 Subject: [PATCH 22/31] BREAKING CHANGE: Changing Repo to allow Publishing to PyPI. (#37) --- ...{pythonpublish.yml.disabled => pythonpublish.yml} | 2 +- CHANGELOG.md | 4 ++++ pyproject.toml | 12 ++++++------ 3 files changed, 11 insertions(+), 7 deletions(-) rename .github/workflows/{pythonpublish.yml.disabled => pythonpublish.yml} (94%) diff --git a/.github/workflows/pythonpublish.yml.disabled b/.github/workflows/pythonpublish.yml similarity index 94% rename from .github/workflows/pythonpublish.yml.disabled rename to .github/workflows/pythonpublish.yml index ffcea55..ea88532 100644 --- a/.github/workflows/pythonpublish.yml.disabled +++ b/.github/workflows/pythonpublish.yml @@ -8,7 +8,7 @@ jobs: deploy: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 with: diff --git a/CHANGELOG.md b/CHANGELOG.md index f131dd6..85a69ba 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,9 @@ # Changelog +## 5.0.0 (2024-08-09) + * Renaming repo to realit-singer-python to allow required publishing to PyPI + * NOTE: Will need to urgently update tap-oracle, tap-mssql, tap-s3-csv which are dependent on this.
+ ## 4.0.1 (2024-08-08) * Correcting Supported Python version constraint in pyproject.toml >=3.8,<3.13 * Bump pytest from 7.4.4 to 8.3.2 diff --git a/pyproject.toml b/pyproject.toml index 53190dd..60f86fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,12 +1,12 @@ [tool.poetry] -name = "pipelinewise-singer-python" -version = "4.0.1" -description = "Singer.io utility library - PipelineWise compatible" +name = "realit-singer-python" +version = "5.0.0" +description = "Singer.io utility library - PipelineWise and Meltano compatible" authors = [] license = "Apache 2.0" readme = "README.md" -homepage = "https://github.com/s7clarke10/pipelinewise-singer-python" -repository = "https://github.com/s7clarke10/pipelinewise-singer-python" +homepage = "https://github.com/s7clarke10/realit-singer-python" +repository = "https://github.com/s7clarke10/realit-singer-python" keywords = ["singer", "meltano", "pipelinewise", "framework"] classifiers = [ "License :: OSI Approved :: Apache Software License", @@ -83,4 +83,4 @@ requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.poetry.scripts] -pipelinewise-singer-python = "pipelinewise_singer_python.__init__:main" \ No newline at end of file +realit-singer-python = "realit_singer_python.__init__:main" \ No newline at end of file From d032c94c61c56b4ef8e12855f256329a586fc2f0 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Fri, 9 Aug 2024 15:13:14 +1200 Subject: [PATCH 23/31] Updating push to pypi --- .github/workflows/pythonpublish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index ea88532..fb70e95 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -2,7 +2,7 @@ name: Upload Python Package to PyPi on: release: - types: [created] + types: [published] jobs: deploy: From 72071a78f823f50842adc4d6e423275fd5a62c5c Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Fri, 9 Aug 2024 15:38:50 +1200 Subject: [PATCH 24/31] Adjusting build command --- .github/workflows/pythonpublish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index fb70e95..e90ca4b 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -22,5 +22,5 @@ jobs: TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | - python setup.py sdist bdist_wheel + python -m build twine upload dist/* From 00be70c50937543c999aa2966ac349f6736fb0a8 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Fri, 9 Aug 2024 15:39:41 +1200 Subject: [PATCH 25/31] Adding build --- .github/workflows/pythonpublish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index e90ca4b..993a752 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -16,7 +16,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install setuptools wheel twine + pip install build setuptools wheel twine - name: Build and publish env: TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} From cae9c3a8f8eba098e00c4431e77f250476bd371f Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: 
Fri, 9 Aug 2024 16:23:50 +1200 Subject: [PATCH 26/31] Updating Publishing approach --- .github/workflows/pythonpublish.yml | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index 993a752..4237358 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -4,23 +4,26 @@ on: release: types: [published] +permissions: + contents: read + jobs: deploy: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v3 with: python-version: '3.x' - name: Install dependencies run: | python -m pip install --upgrade pip - pip install build setuptools wheel twine - - name: Build and publish - env: - TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: | - python -m build - twine upload dist/* + pip install build + - name: Build package + run: python -m build + - name: Publish package + uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} From d862bdb926869c7a09b2203d0b86dc5d3ded3225 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Fri, 9 Aug 2024 16:49:10 +1200 Subject: [PATCH 27/31] Correcting Topic --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 60f86fc..778b8b5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", - "Development Status :: 5 - Production/Stable", "Topic :: Singer" + "Development Status :: 5 - Production/Stable", "Topic :: Database" ] packages = [ { include = "singer" } From 9d718c2320a52974ab8ffcd0afaae69425f029bc Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:58:03 +1200 Subject: [PATCH 28/31] Feature/update readme badges (#38) * Update README.md * Bumping Version * Bumping versions --------- Co-authored-by: Steve Clarke --- CHANGELOG.md | 3 +++ README.md | 12 ++++++------ poetry.lock | 4 ++-- pyproject.toml | 2 +- 4 files changed, 12 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 85a69ba..3f750d2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 5.0.1 (2024-08-12) + * Updating the README.md to have the badges point to the correctly published version on PyPI. + ## 5.0.0 (2024-08-09) * Renaming repo to realit-singer-python to allow required publishing to PyPI * NOTE: Will need to urgently update tap-oracle, tap-mssql, tap-s3-csv which are dependent on this. 
diff --git a/README.md b/README.md index d4d1eb2..46efca1 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,12 @@ -pipelinewise-singer-python +realit-singer-python =================== -[![PyPI version](https://badge.fury.io/py/pipelinewise-singer-python.svg)](https://badge.fury.io/py/pipelinewise-singer-python) -[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/pipelinewise-singer-python.svg)](https://pypi.org/project/pipelinewise-singer-python/) +[![PyPI version](https://badge.fury.io/py/realit-singer-python.svg)](https://badge.fury.io/py/realit-singer-python) +[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/realit-singer-python.svg)](https://pypi.org/project/realit-singer-python/) [![License: MIT](https://img.shields.io/badge/License-Apache2-yellow.svg)](https://opensource.org/licenses/Apache-2.0) Writes the Singer format from Python. -This is a fork of [Singer's singer-python](https://github.com/singer-io/singer-python) made for [PipelineWise](https://transferwise.github.io/pipelinewise). +This is a fork of [Singer's singer-python](https://github.com/singer-io/singer-python) made for [PipelineWise](https://transferwise.github.io/pipelinewise) and [Meltano](https://meltano.com/) Usage --- @@ -44,8 +44,8 @@ singer.write_state({'my_table': 'd'}) ### Logging configuration -**pipelinewise-singer-python** by default doesn't use any predefined logging configuration, it's up to the calling -library to define it. However, if the environment variable `LOGGING_CONF_FILE` is found and set then the **pipelinewise-singer-python** +**realit-singer-python** by default doesn't use any predefined logging configuration, it's up to the calling +library to define it. However, if the environment variable `LOGGING_CONF_FILE` is found and set then the **realit-singer-python** would use the path provided in the env variable as the logging configuration for the logger. ### Singer Decimal diff --git a/poetry.lock b/poetry.lock index 47eaac8..8256fce 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
[[package]] name = "appnope" @@ -923,8 +923,8 @@ astroid = ">=3.2.2,<=3.3.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" diff --git a/pyproject.toml b/pyproject.toml index 778b8b5..c88075b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "realit-singer-python" -version = "5.0.0" +version = "5.0.1" description = "Singer.io utility library - PipelineWise and Meltano compatible" authors = [] license = "Apache 2.0" From 357f37d6956bfd90cc4528634f89151c3d1dd211 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Tue, 13 Aug 2024 13:57:41 +1200 Subject: [PATCH 29/31] Bumping dependencies (#45) Co-authored-by: Steve Clarke --- .github/workflows/pythonpublish.yml | 4 +- CHANGELOG.md | 8 + poetry.lock | 380 ++++++++++++++-------------- pyproject.toml | 10 +- 4 files changed, 207 insertions(+), 195 deletions(-) diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index 4237358..79cd058 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -13,7 +13,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v5 with: python-version: '3.x' - name: Install dependencies @@ -23,7 +23,7 @@ jobs: - name: Build package run: python -m build - name: Publish package - uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 + uses: pypa/gh-action-pypi-publish@ec4db0b4ddc65acdf4bff5fa45ac92d78b56bdf0 with: user: __token__ password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 3f750d2..5deea25 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## 5.0.2 (2024-08-13) + * Bump black from 24.4.2 to 24.8.0 + * Bump pylint from 3.2.5 to 3.2.6 + * Bump mypy from 1.10.1 to 1.11.1 + * Bump flake8 from 7.1.0 to 7.1.1 + * Bump actions/setup-python from 3 to 5 + * Bump pypa/gh-action-pypi-publish from 1.4.2 to 1.9.0 + ## 5.0.1 (2024-08-12) * Updating the README.md to have the badges point to the correctly published version on PyPI. diff --git a/poetry.lock b/poetry.lock index 8256fce..c6b5459 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,13 +13,13 @@ files = [ [[package]] name = "astroid" -version = "3.2.2" +version = "3.2.4" description = "An abstract syntax tree for Python with inference support." 
optional = false python-versions = ">=3.8.0" files = [ - {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"}, - {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"}, + {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, + {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, ] [package.dependencies] @@ -45,22 +45,22 @@ test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "backcall" @@ -86,33 +86,33 @@ files = [ [[package]] name = "black" -version = "24.4.2" +version = "24.8.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = 
"black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, + {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, + {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, + {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, + {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, + {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, + {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, + {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, ] [package.dependencies] @@ -363,13 +363,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = 
"sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -407,13 +407,13 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "flake8" -version = "7.1.0" +version = "7.1.1" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, - {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, + {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, + {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, ] [package.dependencies] @@ -655,44 +655,44 @@ yaml = ["pyyaml"] [[package]] name = "mypy" -version = "1.10.1" +version = "1.11.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, - {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, - {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, - {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, - {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, - {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, - {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, - {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, - {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, - {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, - {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, - {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, - {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, - {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, - {file = 
"mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, - {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, - {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, - {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, - {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, - {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, - {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, - {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, - {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, - {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, - {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, + {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, + {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, + {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, + {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, + {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, + {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, + {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, + {file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, + {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, + {file = 
"mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, + {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, + {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, + {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, + {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, + {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, + {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, + {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, + {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, + {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] @@ -842,13 +842,13 @@ files = [ [[package]] name = "pure-eval" -version = "0.2.2" +version = "0.2.3" description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, ] [package.extras] @@ -856,13 +856,13 @@ tests = ["pytest"] [[package]] name = "pycodestyle" -version = "2.12.0" +version = "2.12.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, - {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, 
+ {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] @@ -909,17 +909,17 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pylint" -version = "3.2.5" +version = "3.2.6" description = "python code static checker" optional = false python-versions = ">=3.8.0" files = [ - {file = "pylint-3.2.5-py3-none-any.whl", hash = "sha256:32cd6c042b5004b8e857d727708720c54a676d1e22917cf1a2df9b4d4868abd6"}, - {file = "pylint-3.2.5.tar.gz", hash = "sha256:e9b7171e242dcc6ebd0aaa7540481d1a72860748a0a7816b8fe6cf6c80a6fe7e"}, + {file = "pylint-3.2.6-py3-none-any.whl", hash = "sha256:03c8e3baa1d9fb995b12c1dbe00aa6c4bcef210c2a2634374aedeb22fb4a8f8f"}, + {file = "pylint-3.2.6.tar.gz", hash = "sha256:a5d01678349454806cff6d886fb072294f56a58c4761278c97fb557d708e1eb3"}, ] [package.dependencies] -astroid = ">=3.2.2,<=3.3.0-dev0" +astroid = ">=3.2.4,<=3.3.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -1020,110 +1020,114 @@ rpds-py = ">=0.7.0" [[package]] name = "rpds-py" -version = "0.18.1" +version = "0.20.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"}, - {file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1"}, - {file = "rpds_py-0.18.1-cp310-none-win32.whl", hash = "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333"}, - {file = "rpds_py-0.18.1-cp310-none-win_amd64.whl", hash = "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a"}, - {file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"}, - {file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", 
hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88"}, - {file = "rpds_py-0.18.1-cp311-none-win32.whl", hash = "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb"}, - {file = "rpds_py-0.18.1-cp311-none-win_amd64.whl", hash = "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2"}, - {file = "rpds_py-0.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3"}, - {file = "rpds_py-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a"}, - {file = "rpds_py-0.18.1-cp312-none-win32.whl", hash = "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6"}, - {file = "rpds_py-0.18.1-cp312-none-win_amd64.whl", hash = "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72"}, - {file = "rpds_py-0.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74"}, - {file = "rpds_py-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc"}, - {file = "rpds_py-0.18.1-cp38-none-win32.whl", hash = "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9"}, - {file = "rpds_py-0.18.1-cp38-none-win_amd64.whl", hash = "sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2"}, - {file = "rpds_py-0.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93"}, - {file = "rpds_py-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26"}, - {file = "rpds_py-0.18.1-cp39-none-win32.whl", hash = "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360"}, - {file = "rpds_py-0.18.1-cp39-none-win_amd64.whl", hash = "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e"}, - {file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, + {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, + {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, + {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, + {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, + {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, + {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, + 
{file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, + {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, + {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, + {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, + 
{file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, + {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, + {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, + {file = 
"rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, + {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, ] [[package]] @@ -1180,13 +1184,13 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.5" +version = "0.13.0" description = "Style preserving TOML library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, - {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, + {file = "tomlkit-0.13.0-py3-none-any.whl", hash = "sha256:7075d3042d03b80f603482d69bf0c8f345c2b30e41699fd8883227f89972b264"}, + {file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"}, ] [[package]] @@ -1320,13 +1324,13 @@ files = [ [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] [package.extras] @@ -1336,4 +1340,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.13" -content-hash = "1d6d019d01bce7e0c14577e383182e6f1c693b6e7b1acafcfc7d6f577da42c7e" +content-hash = "af9a63b346f818bc5594d8e453d57f1f9764970ddc52451ddb4e19a1bdba916d" diff --git a/pyproject.toml b/pyproject.toml index c88075b..b60adf0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "realit-singer-python" -version = "5.0.1" +version = "5.0.2" description = "Singer.io utility library - PipelineWise and Meltano compatible" authors = [] license = "Apache 2.0" @@ -38,17 +38,17 @@ ciso8601 = "^2.3.1" [tool.poetry.dev-dependencies] types-pytz = "^2024.1.0.20240417" types-python-dateutil = "^2.9.0.20240316" -pylint = "3.2.5" +pylint = "3.2.6" pytest 
= "8.*" coverage = ">= 6.3, < 8.0" ipython = "^8.12.1" ipdb = "^0.13.13" unify = "^0.5" tox = "^4.17.1" -flake8 = { version = "^7.1.0", python = ">=3.8.1" } -black = "^24.4.2" +flake8 = { version = "^7.1.1", python = ">=3.8.1" } +black = "^24.8.0" pydocstyle = "^6.3.0" -mypy = "^1.10.1" +mypy = "^1.11.1" isort = "^5.13.2" [tool.black] From fabcd63bc8f3d2f980fc84259056a6920e89c3b2 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Mon, 19 Aug 2024 10:16:23 +1200 Subject: [PATCH 30/31] Trusted publish to pypi (#46) * Move to trusted publishing to pypi * Bumping lock file version --- .github/workflows/pythonpublish.yml | 4 +--- CHANGELOG.md | 3 +++ poetry.lock | 4 ++-- pyproject.toml | 2 +- 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index 79cd058..eb5cb39 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -6,6 +6,7 @@ on: permissions: contents: read + id-token: write # IMPORTANT: this permission is mandatory for trusted publishing jobs: deploy: @@ -24,6 +25,3 @@ jobs: run: python -m build - name: Publish package uses: pypa/gh-action-pypi-publish@ec4db0b4ddc65acdf4bff5fa45ac92d78b56bdf0 - with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 5deea25..57bd26b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 5.0.3 (2024-08-19) + * Moving to a trusted push to pypi + ## 5.0.2 (2024-08-13) * Bump black from 24.4.2 to 24.8.0 * Bump pylint from 3.2.5 to 3.2.6 diff --git a/poetry.lock b/poetry.lock index c6b5459..42d3e03 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "appnope" @@ -923,8 +923,8 @@ astroid = ">=3.2.4,<=3.3.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" diff --git a/pyproject.toml b/pyproject.toml index b60adf0..3fccd21 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "realit-singer-python" -version = "5.0.2" +version = "5.0.3" description = "Singer.io utility library - PipelineWise and Meltano compatible" authors = [] license = "Apache 2.0" From 932960a6df70aaa5a1590c72b499293ec7004c15 Mon Sep 17 00:00:00 2001 From: Steve Clarke <84364906+s7clarke10@users.noreply.github.com> Date: Mon, 19 Aug 2024 11:02:53 +1200 Subject: [PATCH 31/31] debugging publishing (#47) --- .github/workflows/pythonpublish.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index eb5cb39..da6f3f3 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -4,12 +4,14 @@ on: release: types: [published] -permissions: - contents: read - id-token: write # IMPORTANT: this permission is mandatory for trusted publishing - jobs: deploy: + environment: + name: Production + url: https://pypi.org/p/realit-singer-python + permissions: + contents: read + id-token: write # IMPORTANT: this permission is mandatory for trusted publishing runs-on: ubuntu-latest steps: - uses: actions/checkout@v4