chore: Enable Ruff checks in helper private modules (#2084)
edgarrmondragon authored Dec 4, 2023
1 parent fee6126 commit efecfe1
Showing 6 changed files with 30 additions and 32 deletions.
8 changes: 1 addition & 7 deletions pyproject.toml
@@ -151,9 +151,6 @@ xdoctest = ">=1.1.1"
 [tool.poetry.group.benchmark.dependencies]
 pytest-codspeed = ">=2.2.0"
 
-[tool.black]
-exclude = ".*simpleeval.*"
-
 [tool.pytest.ini_options]
 addopts = '--ignore=singer_sdk/helpers/_simpleeval.py -m "not external"'
 markers = [
@@ -247,8 +244,7 @@ target-version = "py37"
 [tool.ruff.lint]
 exclude = [
     "cookiecutter/*",
-    "singer_sdk/helpers/_simpleeval.py",
-    "tests/core/test_simpleeval.py",
+    "*simpleeval*",
 ]
 ignore = [
     "ANN101", # Missing type annotation for `self` in method
@@ -318,8 +314,6 @@ unfixable = [
 "tests/*" = ["ANN", "D1", "D2", "FBT001", "FBT003", "PLR2004", "S101"]
 # Disabled some checks in samples code
 "samples/*" = ["ANN", "D"]
-# Don't require docstrings conventions or type annotations in private modules
-"singer_sdk/helpers/_*.py" = ["ANN", "D105"]
 # Templates support a generic resource of type Any.
 "singer_sdk/testing/*.py" = ["S101"]
 "singer_sdk/testing/templates.py" = ["ANN401"]
10 changes: 5 additions & 5 deletions singer_sdk/helpers/_batch.py
@@ -92,7 +92,7 @@ class SDKBatchMessage(Message):
     manifest: list[str] = field(default_factory=list)
     """The manifest of files in the batch."""
 
-    def __post_init__(self):
+    def __post_init__(self) -> None:
         if isinstance(self.encoding, dict):
             self.encoding = BaseBatchFileEncoding.from_dict(self.encoding)
@@ -112,7 +112,7 @@ class StorageTarget:
     params: dict = field(default_factory=dict)
     """"The storage parameters."""
 
-    def asdict(self):
+    def asdict(self) -> dict[str, t.Any]:
         """Return a dictionary representation of the message.
 
         Returns:
@@ -144,7 +144,7 @@ def split_url(url: str) -> tuple[str, str]:
     """
     if platform.system() == "Windows" and "\\" in url:
         # Original code from pyFileSystem split
-        # Augemnted slitly to properly Windows paths
+        # Augmented slightly to properly handle Windows paths
         split = url.rsplit("\\", 1)
         return (split[0] or "\\", split[1])
@@ -224,7 +224,7 @@ class BatchConfig:
     batch_size: int = DEFAULT_BATCH_SIZE
     """The max number of records in a batch."""
 
-    def __post_init__(self):
+    def __post_init__(self) -> None:
         if isinstance(self.encoding, dict):
             self.encoding = BaseBatchFileEncoding.from_dict(self.encoding)
@@ -234,7 +234,7 @@ def __post_init__(self):
         if self.batch_size is None:
             self.batch_size = DEFAULT_BATCH_SIZE
 
-    def asdict(self):
+    def asdict(self) -> dict[str, t.Any]:
         """Return a dictionary representation of the message.
 
         Returns:
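
The __post_init__ hunks above annotate a recurring coercion pattern: accept either an encoding object or a plain dict and normalize it once, in __post_init__. A minimal, self-contained sketch of that pattern (the Encoding and Config names are stand-ins for illustration, not the SDK's classes):

from __future__ import annotations

from dataclasses import dataclass, field


@dataclass
class Encoding:
    format: str = "jsonl"
    compression: str | None = None

    @classmethod
    def from_dict(cls, data: dict) -> Encoding:
        return cls(**data)


@dataclass
class Config:
    encoding: Encoding | dict = field(default_factory=dict)

    def __post_init__(self) -> None:
        # Accept a plain dict (e.g. parsed from JSON) and coerce it.
        if isinstance(self.encoding, dict):
            self.encoding = Encoding.from_dict(self.encoding)


print(Config(encoding={"format": "jsonl"}).encoding)
# Encoding(format='jsonl', compression=None)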
14 changes: 9 additions & 5 deletions singer_sdk/helpers/_flattening.py
@@ -1,4 +1,4 @@
-"""Internal helper library for record flatteting functions."""
+"""Internal helper library for record flattening functions."""
 
 from __future__ import annotations

@@ -70,7 +70,7 @@ def flatten_key(key_name: str, parent_keys: list[str], separator: str = "__") ->
         inflection.camelize(inflected_key[reducer_index]),
     )
     inflected_key[reducer_index] = (
-        reduced_key if len(reduced_key) > 1 else inflected_key[reducer_index][0:3]
+        reduced_key if len(reduced_key) > 1 else inflected_key[reducer_index][:3]
     ).lower()
     reducer_index += 1
@@ -358,8 +358,8 @@ def _flatten_schema(  # noqa: C901, PLR0912
             items.append((new_key, next(iter(field_schema.values()))[0]))
 
     # Sort and check for duplicates
-    def _key_func(item):
-        return item[0]  # first item is tuple is the key name.
+    def _key_func(item: tuple[str, dict]) -> str:
+        return item[0]  # first item in tuple is the key name.
 
     sorted_items = sorted(items, key=_key_func)
     for field_name, g in itertools.groupby(sorted_items, key=_key_func):
@@ -451,7 +451,7 @@ def _flatten_record(
     return dict(items)
 
 
-def _should_jsondump_value(key: str, value: t.Any, flattened_schema=None) -> bool:
+def _should_jsondump_value(
+    key: str,
+    value: t.Any,  # noqa: ANN401
+    flattened_schema: dict[str, t.Any] | None = None,
+) -> bool:
     """Return True if json.dump() should be used to serialize the value.
 
     Args:
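
For context, the newly typed _key_func feeds a standard sort-then-group duplicate check. A self-contained sketch of that pattern (the items data here is invented for illustration):

import itertools

items: list[tuple[str, dict]] = [("id", {}), ("name", {}), ("id", {})]


def _key_func(item: tuple[str, dict]) -> str:
    return item[0]  # first item in tuple is the key name.


# groupby only merges adjacent equal keys, hence the sort first.
for key, group in itertools.groupby(sorted(items, key=_key_func), key=_key_func):
    if len(list(group)) > 1:
        print(f"Duplicate flattened key: {key}")  # Duplicate flattened key: id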
2 changes: 1 addition & 1 deletion singer_sdk/helpers/_secrets.py
@@ -28,7 +28,7 @@ def is_common_secret_key(key_name: str) -> bool:
 class SecretString(str):
     """For now, this class wraps a sensitive string to be identified as such later."""
 
-    def __init__(self, contents):
+    def __init__(self, contents: str) -> None:
         """Initialize secret string."""
         self.contents = contents
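
The annotation makes the intent of SecretString explicit: a str subclass that behaves like an ordinary string everywhere but can be singled out for redaction. A minimal usage sketch (the redact helper is illustrative, not part of the SDK):

class SecretString(str):
    """A str subclass that marks its contents as sensitive."""

    def __init__(self, contents: str) -> None:
        self.contents = contents


def redact(value: str) -> str:
    # Hide anything explicitly marked as secret; pass other strings through.
    return "****" if isinstance(value, SecretString) else value


token = SecretString("s3cr3t-t0k3n")
print(token.upper())     # behaves like a normal str: S3CR3T-T0K3N
print(redact(token))     # ****
print(redact("public"))  # public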
14 changes: 7 additions & 7 deletions singer_sdk/helpers/_state.py
@@ -23,7 +23,7 @@ def get_state_if_exists(
     tap_stream_id: str,
     state_partition_context: dict | None = None,
     key: str | None = None,
-) -> t.Any | None:
+) -> t.Any | None:  # noqa: ANN401
     """Return the stream or partition state, creating a new one if it does not exist.
 
     Args:
@@ -135,10 +135,10 @@ def get_writeable_state_dict(
 
 
 def write_stream_state(
-    tap_state,
+    tap_state: dict,
     tap_stream_id: str,
-    key,
-    val,
+    key: str,
+    val: t.Any,  # noqa: ANN401
     *,
     state_partition_context: dict | None = None,
 ) -> None:
@@ -165,21 +165,21 @@ def reset_state_progress_markers(stream_or_partition_state: dict) -> dict | None
 
 def write_replication_key_signpost(
     stream_or_partition_state: dict,
-    new_signpost_value: t.Any,
+    new_signpost_value: t.Any,  # noqa: ANN401
 ) -> None:
     """Write signpost value."""
     stream_or_partition_state[SIGNPOST_MARKER] = to_json_compatible(new_signpost_value)
 
 
 def write_starting_replication_value(
     stream_or_partition_state: dict,
-    initial_value: t.Any,
+    initial_value: t.Any,  # noqa: ANN401
 ) -> None:
     """Write initial replication value to state."""
     stream_or_partition_state[STARTING_MARKER] = to_json_compatible(initial_value)
 
 
-def get_starting_replication_value(stream_or_partition_state: dict):
+def get_starting_replication_value(stream_or_partition_state: dict) -> t.Any | None:  # noqa: ANN401
     """Retrieve initial replication marker value from state."""
     if not stream_or_partition_state:
         return None
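
The values written here stay t.Any because replication keys can be timestamps, integers, or strings; to_json_compatible normalizes them before they land in state. A condensed sketch of the write path (the marker constants and the simplified to_json_compatible body are stand-ins for the real module's):

import datetime
import typing as t

SIGNPOST_MARKER = "replication_key_signpost"
STARTING_MARKER = "starting_replication_value"


def to_json_compatible(val: t.Any) -> t.Any:  # noqa: ANN401
    # Simplified: the real helper also handles pendulum datetimes.
    return val.isoformat() if isinstance(val, datetime.datetime) else val


state: dict = {}
state[STARTING_MARKER] = to_json_compatible(
    datetime.datetime(2023, 12, 4, tzinfo=datetime.timezone.utc),
)
state[SIGNPOST_MARKER] = to_json_compatible(1000)
print(state)
# {'starting_replication_value': '2023-12-04T00:00:00+00:00',
#  'replication_key_signpost': 1000}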
14 changes: 7 additions & 7 deletions singer_sdk/helpers/_typing.py
@@ -39,7 +39,7 @@ def __init__(self, *args: object) -> None:
         super().__init__(msg, *args)
 
 
-def to_json_compatible(val: t.Any) -> t.Any:
+def to_json_compatible(val: t.Any) -> t.Any:  # noqa: ANN401
     """Return as string if datetime. JSON does not support proper datetime types.
 
     If given a naive datetime object, pendulum automatically makes it utc
@@ -185,7 +185,7 @@ def get_datelike_property_type(property_schema: dict) -> str | None:
     return None
 
 
-def _is_string_with_format(type_dict):
+def _is_string_with_format(type_dict: dict[str, t.Any]) -> bool | None:
     if "string" in type_dict.get("type", []) and type_dict.get("format") in {
         "date-time",
         "time",
@@ -196,14 +196,14 @@ def _is_string_with_format(type_dict):
 
 
 def handle_invalid_timestamp_in_record(
-    record,  # noqa: ARG001
+    record: dict[str, t.Any],  # noqa: ARG001
     key_breadcrumb: list[str],
     invalid_value: str,
     datelike_typename: str,
     ex: Exception,
     treatment: DatetimeErrorTreatmentEnum | None,
     logger: logging.Logger,
-) -> t.Any:
+) -> t.Any:  # noqa: ANN401
     """Apply treatment or raise an error for invalid time values."""
     treatment = treatment or DatetimeErrorTreatmentEnum.ERROR
     msg = (
@@ -331,7 +331,7 @@ def _warn_unmapped_properties(
     stream_name: str,
     property_names: tuple[str],
     logger: logging.Logger,
-):
+) -> None:
     logger.warning(
         "Properties %s were present in the '%s' stream but "
         "not found in catalog schema. Ignoring.",
@@ -470,9 +470,9 @@ def _conform_record_data_types(  # noqa: PLR0912
 
 
 def _conform_primitive_property(  # noqa: PLR0911
-    elem: t.Any,
+    elem: t.Any,  # noqa: ANN401
     property_schema: dict,
-) -> t.Any:
+) -> t.Any:  # noqa: ANN401
     """Converts a primitive (i.e. not object or array) to a json compatible type."""
     if isinstance(elem, (datetime.datetime, pendulum.DateTime)):
         return to_json_compatible(elem)
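
The # noqa: ANN401 markers are needed because Ruff's ANN401 rule forbids typing.Any in annotations, yet these conversion helpers genuinely accept and return arbitrary values. An illustrative, simplified take on the primitive-conformance idea (not the SDK's exact implementation, which covers more cases):

import datetime
import typing as t


def conform_primitive(elem: t.Any, property_schema: dict) -> t.Any:  # noqa: ANN401
    """Map a Python value to a JSON-compatible one."""
    if isinstance(elem, datetime.datetime):
        return elem.isoformat()
    if isinstance(elem, bytes):
        # Emit a boolean for schemas typed as boolean, a hex string otherwise.
        if "boolean" in property_schema.get("type", []):
            return bool(elem)
        return elem.hex()
    return elem


print(conform_primitive(datetime.datetime(2023, 12, 4), {}))  # 2023-12-04T00:00:00
print(conform_primitive(b"\xbe\xef", {"type": ["string"]}))   # beef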
