chore: Enable more Ruff rules (#1488)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
edgarrmondragon and pre-commit-ci[bot] authored Mar 6, 2023
1 parent 44aa0a0 commit 7ad6574
Showing 15 changed files with 55 additions and 44 deletions.
11 changes: 10 additions & 1 deletion pyproject.toml
@@ -224,10 +224,15 @@ line-length = 88
select = [
"E",
"F",
"ANN", # flake8-annotations
"I", # isort
"D", # pydocstyle/flake8-docstrings
"UP", # pyupgrade
"YTT", # flake8-2020
"ANN", # flake8-annotations
"T10", # flake8-debugger
"ICN", # flake8-import-conventions
"PIE", # flake8-pie
"PT", # flake8-pytest-style
]
src = ["samples", "singer_sdk", "tests"]
target-version = "py37"
@@ -248,6 +253,10 @@ allow-star-arg-any = true
mypy-init-return = true
suppress-dummy-args = true

[tool.ruff.flake8-pytest-style]
fixture-parentheses = false
parametrize-names-type = "csv"

[tool.ruff.isort]
known-first-party = ["singer_sdk", "samples", "tests"]
required-imports = ["from __future__ import annotations"]
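
For context, the two new `[tool.ruff.flake8-pytest-style]` options map onto test-code conventions roughly like the sketch below (illustrative only, not code from this commit): `fixture-parentheses = false` wants bare `@pytest.fixture` decorators, and `parametrize-names-type = "csv"` wants the parametrize argument names given as a single comma-separated string.

```python
from __future__ import annotations

import pytest


@pytest.fixture  # fixture-parentheses = false: no empty "()" after the decorator
def numbers() -> list[int]:
    return [1, 2, 3]


# parametrize-names-type = "csv": argument names as one comma-separated string
@pytest.mark.parametrize("value,expected", [(1, "1"), (2, "2")])
def test_str(numbers: list[int], value: int, expected: str) -> None:
    assert value in numbers
    assert str(value) == expected
```
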
6 changes: 2 additions & 4 deletions singer_sdk/helpers/_secrets.py
@@ -20,10 +20,8 @@ def is_common_secret_key(key_name: str) -> bool:
if key_name in COMMON_SECRET_KEYS:
return True
return any(
[
key_name.lower().endswith(key_suffix)
for key_suffix in COMMON_SECRET_KEY_SUFFIXES
]
key_name.lower().endswith(key_suffix)
for key_suffix in COMMON_SECRET_KEY_SUFFIXES
)


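The edit above drops the inner list so `any()` consumes a generator and can short-circuit on the first match instead of building a full list of booleans first. The same pattern in isolation (hypothetical suffixes, not the SDK's actual constant):

```python
COMMON_SECRET_KEY_SUFFIXES = ("_key", "_token", "_password")  # hypothetical values


def is_probably_secret(key_name: str) -> bool:
    # Generator expression: no intermediate list is built, and iteration
    # stops as soon as one suffix matches.
    return any(
        key_name.lower().endswith(suffix) for suffix in COMMON_SECRET_KEY_SUFFIXES
    )


assert is_probably_secret("API_TOKEN")
assert not is_probably_secret("username")
```
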
4 changes: 2 additions & 2 deletions singer_sdk/helpers/_typing.py
@@ -221,7 +221,7 @@ def is_string_array_type(type_dict: dict) -> bool:
)

if "anyOf" in type_dict:
return any([is_string_array_type(t) for t in type_dict["anyOf"]])
return any(is_string_array_type(t) for t in type_dict["anyOf"])

if "type" not in type_dict:
raise ValueError(f"Could not detect type from schema '{type_dict}'")
@@ -238,7 +238,7 @@ def is_array_type(type_dict: dict) -> bool:
)

if "anyOf" in type_dict:
return any([is_array_type(t) for t in type_dict["anyOf"]])
return any(is_array_type(t) for t in type_dict["anyOf"])

if "type" not in type_dict:
raise ValueError(f"Could not detect type from schema '{type_dict}'")
2 changes: 0 additions & 2 deletions singer_sdk/sinks/batch.py
@@ -49,7 +49,6 @@ def start_batch(self, context: dict) -> None:
Args:
context: Stream partition or context dictionary.
"""
pass

def process_record(self, record: dict, context: dict) -> None:
"""Load the latest record from the stream.
@@ -89,4 +88,3 @@ def process_batch(self, context: dict) -> None:
Args:
context: Stream partition or context dictionary.
"""
pass
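
This and the similar deletions in the files below come from the same rule (flake8-pie's unnecessary-pass check, as I read it): a function body that already contains a docstring is complete, so the extra statement is dead weight. Minimal sketch:

```python
class ExampleSink:
    def start_batch(self, context: dict) -> None:
        """Start a new batch; the default implementation is a no-op."""
        # No trailing `pass` needed -- the docstring alone is a valid body.
```
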
4 changes: 0 additions & 4 deletions singer_sdk/sinks/core.py
@@ -344,7 +344,6 @@ def _after_process_record(self, context: dict) -> None:
Args:
context: Stream partition or context dictionary.
"""
pass

# SDK developer overrides:

@@ -379,7 +378,6 @@ def process_record(self, record: dict, context: dict) -> None:
record: Individual record in the stream.
context: Stream partition or context dictionary.
"""
pass

def start_drain(self) -> dict:
"""Set and return `self._context_draining`.
@@ -438,7 +436,6 @@ def setup(self) -> None:
Setup is executed once per Sink instance, after instantiation. If a Schema
change is detected, a new Sink is instantiated and this method is called again.
"""
pass

def clean_up(self) -> None:
"""Perform any clean up actions required at end of a stream.
@@ -447,7 +444,6 @@ def clean_up(self) -> None:
that may be in use from other instances of the same sink. Stream name alone
should not be relied on, it's recommended to use a uuid as well.
"""
pass

def process_batch_files(
self,
3 changes: 0 additions & 3 deletions singer_sdk/sinks/record.py
@@ -34,7 +34,6 @@ def process_batch(self, context: dict) -> None:
Args:
context: Stream partition or context dictionary.
"""
pass

@final
def start_batch(self, context: dict) -> None:
@@ -47,7 +46,6 @@ def start_batch(self, context: dict) -> None:
Args:
context: Stream partition or context dictionary.
"""
pass

@abc.abstractmethod
def process_record(self, record: dict, context: dict) -> None:
@@ -65,4 +63,3 @@ def process_record(self, record: dict, context: dict) -> None:
record: Individual record in the stream.
context: Stream partition or context dictionary.
"""
pass
1 change: 0 additions & 1 deletion singer_sdk/streams/core.py
@@ -1219,7 +1219,6 @@ def get_records(self, context: dict | None) -> Iterable[dict | tuple[dict, dict]
Args:
context: Stream partition or context dictionary.
"""
pass

def get_batch_config(self, config: Mapping) -> BatchConfig | None:
"""Return the batch config for this stream.
1 change: 0 additions & 1 deletion singer_sdk/streams/rest.py
@@ -69,7 +69,6 @@ class RESTStream(Stream, Generic[_TToken], metaclass=abc.ABCMeta):
@abc.abstractmethod
def url_base(self) -> str:
"""Return the base url, e.g. ``https://api.mysite.com/v3/``."""
pass

def __init__(
self,
4 changes: 2 additions & 2 deletions singer_sdk/testing/factory.py
@@ -46,8 +46,8 @@ def config(self) -> SuiteConfig:
return suite_config or SuiteConfig()

@pytest.fixture
def resource(self) -> Any: # noqa: ANN401
yield
def resource(self) -> Any: # noqa: ANN401, PT004
yield # noqa: PT022

@pytest.fixture(scope="class")
def runner(self) -> TapTestRunner | TargetTestRunner:
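
The added `noqa` codes suppress two pytest-style checks for this deliberately overridable fixture: as I read them, PT004 asks that a fixture returning nothing be named with a leading underscore, and PT022 flags a trailing `yield` with nothing after it. Where a fixture really is side-effect only, the un-suppressed form would look something like this hypothetical example:

```python
import pytest


@pytest.fixture
def _plugin_env(monkeypatch: pytest.MonkeyPatch) -> None:
    # Leading underscore because nothing is returned (PT004), and a plain
    # body instead of a bare `yield` because there is no teardown (PT022).
    monkeypatch.setenv("PLUGIN_TEST_PROP1", "hello")
```
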
20 changes: 13 additions & 7 deletions tests/core/configuration/test_dict_config.py
@@ -52,14 +52,20 @@ def test_get_env_var_config():
assert env_config["prop1"] == "hello"
assert env_config["prop3"] == ["val1", "val2"]
assert "PROP1" not in env_config
assert "prop2" not in env_config and "PROP2" not in env_config
assert "prop4" not in env_config and "PROP4" not in env_config
assert "prop2" not in env_config
assert "PROP2" not in env_config
assert "prop4" not in env_config
assert "PROP4" not in env_config

no_env_config = parse_environment_config(CONFIG_JSONSCHEMA, "PLUGIN_TEST_")
assert "prop1" not in no_env_config and "PROP1" not in env_config
assert "prop2" not in no_env_config and "PROP2" not in env_config
assert "prop3" not in no_env_config and "PROP3" not in env_config
assert "prop4" not in no_env_config and "PROP4" not in env_config
assert "prop1" not in no_env_config
assert "PROP1" not in env_config
assert "prop2" not in no_env_config
assert "PROP2" not in env_config
assert "prop3" not in no_env_config
assert "PROP3" not in env_config
assert "prop4" not in no_env_config
assert "PROP4" not in env_config


def test_get_dotenv_config(tmpdir, monkeypatch: pytest.MonkeyPatch):
@@ -84,7 +90,7 @@ def test_get_env_var_config_not_parsable():
"PLUGIN_TEST_PROP3": '["repeated"]',
},
):
with pytest.raises(ValueError):
with pytest.raises(ValueError, match="A bracketed list was detected"):
parse_environment_config(CONFIG_JSONSCHEMA, "PLUGIN_TEST_")


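Adding `match=` narrows the previously bare `pytest.raises(ValueError)`, which the pytest-style rules treat as too broad. A standalone version of the idea (hypothetical parser, not the SDK's):

```python
import pytest


def parse_port(value: str) -> int:
    port = int(value)
    if not 0 < port < 65536:
        raise ValueError(f"Port out of range: {port}")
    return port


def test_parse_port_rejects_out_of_range() -> None:
    # match= is a regex checked against the exception message, so an
    # unrelated ValueError can no longer satisfy the assertion.
    with pytest.raises(ValueError, match="out of range"):
        parse_port("70000")
```
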
9 changes: 5 additions & 4 deletions tests/core/test_connector_sql.py
@@ -17,7 +17,7 @@ def stringify(in_dict):
class TestConnectorSQL:
"""Test the SQLConnector class."""

@pytest.fixture()
@pytest.fixture
def connector(self):
return SQLConnector(config={"sqlalchemy_url": "sqlite:///"})

@@ -167,9 +167,10 @@ def test_connect_calls_connect(self, connector):
mock_conn.assert_called_once()

def test_connect_raises_on_operational_failure(self, connector):
with pytest.raises(sqlalchemy.exc.OperationalError) as _:
with connector._connect() as conn:
conn.execute("SELECT * FROM fake_table")
with pytest.raises(
sqlalchemy.exc.OperationalError
) as _, connector._connect() as conn:
conn.execute("SELECT * FROM fake_table")

def test_rename_column_uses_connect_correctly(self, connector):
attached_engine = connector._engine
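
Folding the two context managers into one `with` statement leaves a single expression inside the `pytest.raises` block, so an exception raised during setup can no longer masquerade as the one under test. A self-contained sketch of the same shape, using the standard library's sqlite3 instead of SQLAlchemy:

```python
import sqlite3

import pytest


def test_select_from_missing_table_raises(tmp_path) -> None:
    # One combined `with`: only the statement expected to raise sits in the block.
    with pytest.raises(sqlite3.OperationalError), sqlite3.connect(
        str(tmp_path / "example.db")
    ) as conn:
        conn.execute("SELECT * FROM fake_table")
```
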
7 changes: 5 additions & 2 deletions tests/core/test_jsonschema_helpers.py
@@ -140,7 +140,10 @@ def test_nested_complex_objects():
),
)
test2b = test2a.to_dict()
assert test1a and test1b and test2a and test2b
assert test1a
assert test1b
assert test2a
assert test2b


def test_default_value():
@@ -486,7 +489,7 @@ def test_array_type():
assert ArrayType(wrapped_type).type_dict == expected_json_schema


@pytest.mark.snapshot
@pytest.mark.snapshot()
@pytest.mark.parametrize(
"schema_obj,snapshot_name",
[
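
Splitting `assert a and b` into separate statements (here and in test_plugin_base.py below) makes pytest's failure output name the exact operand that was falsy. For example:

```python
def test_both_payloads_present() -> None:
    first = {"a": 1}
    second = {"b": 2}
    # If `second` were empty, the report would point at this specific line
    # rather than at a combined `assert first and second`.
    assert first
    assert second
```
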
6 changes: 3 additions & 3 deletions tests/core/test_mapper.py
@@ -444,14 +444,15 @@ def discover_streams(self):


@pytest.fixture
def clear_schema_cache() -> None:
def _clear_schema_cache() -> None:
"""Schemas are cached, so the cache needs to be cleared between test invocations."""
yield
get_selected_schema.cache_clear()


@freeze_time("2022-01-01T00:00:00Z")
@pytest.mark.snapshot
@pytest.mark.snapshot()
@pytest.mark.usefixtures("_clear_schema_cache")
@pytest.mark.parametrize(
"stream_maps,flatten,flatten_max_depth,snapshot_name",
[
@@ -601,7 +602,6 @@ def test_mapped_stream(
def test_mapped_stream(
snapshot: Snapshot,
snapshot_dir: Path,
clear_schema_cache: None,
stream_maps: dict,
flatten: bool,
flatten_max_depth: int | None,
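
Renaming the fixture to `_clear_schema_cache` and requesting it with `usefixtures` follows the convention that side-effect-only fixtures are not passed as test arguments. A compact sketch of that pattern with a hypothetical cached helper:

```python
import functools

import pytest


@functools.lru_cache
def expensive_lookup(key: str) -> str:
    return key.upper()


@pytest.fixture
def _clear_lookup_cache() -> None:
    # Teardown-only fixture: nothing is returned, so it is requested via the
    # usefixtures marker instead of appearing in the test signature.
    yield
    expensive_lookup.cache_clear()


@pytest.mark.usefixtures("_clear_lookup_cache")
def test_lookup() -> None:
    assert expensive_lookup("a") == "A"
```
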
15 changes: 10 additions & 5 deletions tests/core/test_plugin_base.py
@@ -29,10 +29,15 @@ def test_get_env_var_config():
env_config = PluginTest._env_var_config
assert env_config["prop1"] == "hello"
assert "PROP1" not in env_config
assert "prop2" not in env_config and "PROP2" not in env_config
assert "prop3" not in env_config and "PROP3" not in env_config
assert "prop2" not in env_config
assert "PROP2" not in env_config
assert "prop3" not in env_config
assert "PROP3" not in env_config

no_env_config = PluginTest._env_var_config
assert "prop1" not in no_env_config and "PROP1" not in env_config
assert "prop2" not in no_env_config and "PROP2" not in env_config
assert "prop3" not in no_env_config and "PROP3" not in env_config
assert "prop1" not in no_env_config
assert "PROP1" not in env_config
assert "prop2" not in no_env_config
assert "PROP2" not in env_config
assert "prop3" not in no_env_config
assert "PROP3" not in env_config
6 changes: 3 additions & 3 deletions tests/samples/conftest.py
@@ -18,7 +18,7 @@ def csv_config(outdir: str) -> dict:


@pytest.fixture
def sqlite_sample_db(sqlite_connector):
def _sqlite_sample_db(sqlite_connector):
"""Return a path to a newly constructed sample DB."""
for t in range(3):
sqlite_connector.connection.execute(f"DROP TABLE IF EXISTS t{t}")
@@ -32,8 +32,8 @@ def sqlite_sample_db(sqlite_connector):


@pytest.fixture
def sqlite_sample_tap(sqlite_sample_db, sqlite_sample_db_config) -> SQLiteTap:
_ = sqlite_sample_db
def sqlite_sample_tap(_sqlite_sample_db, sqlite_sample_db_config) -> SQLiteTap:
_ = _sqlite_sample_db
catalog_obj = Catalog.from_dict(
_get_tap_catalog(SQLiteTap, config=sqlite_sample_db_config, select_all=True)
)
