From bbac850b3acceecf17d1475d9105ee76ae99687e Mon Sep 17 00:00:00 2001 From: Gabriele Sarti Date: Fri, 20 Jan 2023 12:45:46 +0100 Subject: [PATCH] `ruff` stylechecking (#159) * Removed isort, flake8, bandit, pylint, pyupgrade, added ruff * Bump ruff version to 0.0.227 * Apply ruff fixes --- .pre-commit-config.yaml | 27 +- Makefile | 9 +- inseq/attr/__init__.py | 1 - inseq/attr/attribution_decorators.py | 4 +- inseq/attr/feat/__init__.py | 1 - inseq/attr/feat/attention_attribution.py | 4 +- inseq/attr/feat/attribution_utils.py | 8 +- inseq/attr/feat/feature_attribution.py | 8 +- inseq/attr/feat/gradient_attribution.py | 4 +- inseq/attr/feat/ops/__init__.py | 1 - inseq/attr/feat/ops/basic_attention.py | 8 +- .../ops/discretized_integrated_gradients.py | 3 +- inseq/attr/feat/ops/monotonic_path_builder.py | 11 +- inseq/commands/attribute.py | 7 +- inseq/commands/attribute_dataset.py | 4 +- inseq/commands/base.py | 4 +- inseq/commands/cli.py | 4 +- inseq/data/__init__.py | 1 - inseq/data/aggregator.py | 8 +- inseq/data/attribution.py | 4 +- inseq/data/batch.py | 3 +- inseq/data/data_utils.py | 10 +- inseq/data/viz.py | 14 +- inseq/models/__init__.py | 4 +- inseq/models/attribution_model.py | 7 +- inseq/models/decoder_only.py | 8 +- inseq/models/encoder_decoder.py | 12 +- inseq/models/huggingface_model.py | 5 +- inseq/models/model_decorators.py | 3 +- inseq/utils/__init__.py | 1 - inseq/utils/argparse.py | 12 +- inseq/utils/cache.py | 4 +- inseq/utils/errors.py | 2 +- inseq/utils/import_utils.py | 1 - inseq/utils/misc.py | 24 +- inseq/utils/registry.py | 6 +- inseq/utils/serialization.py | 22 +- inseq/utils/torch_utils.py | 4 +- inseq/utils/typing.py | 4 +- poetry.lock | 330 +++--------------- pyproject.toml | 125 ++++--- requirements-dev.txt | 18 +- setup.cfg | 47 --- tests/__init__.py | 1 - tests/attr/feat/ops/test_attention_utils.py | 7 +- .../feat/ops/test_monotonic_path_builder.py | 10 +- tests/data/test_aggregator.py | 1 - tests/models/test_huggingface_model.py | 1 - 48 files changed, 234 insertions(+), 573 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1df710e5..0720a966 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,22 +18,6 @@ repos: - id: end-of-file-fixer exclude: LICENSE - - repo: local - hooks: - - id: pyupgrade - name: pyupgrade - entry: poetry run pyupgrade --py38-plus - types: [python] - language: system - - - repo: local - hooks: - - id: isort - name: isort - entry: poetry run isort --settings-path pyproject.toml - types: [python] - language: system - - repo: local hooks: - id: black @@ -42,13 +26,12 @@ repos: types: [python] language: system - - repo: local + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: 'v0.0.227' hooks: - - id: flake8 - name: flake8 - entry: poetry run flake8 --config setup.cfg - types: [python] - language: system + - id: ruff + # Respect `exclude` and `extend-exclude` settings. 
+ args: ["--force-exclude"] - repo: local hooks: diff --git a/Makefile b/Makefile index adf5e1ee..086eb10a 100644 --- a/Makefile +++ b/Makefile @@ -78,26 +78,23 @@ update-deps: #* Linting .PHONY: check-style check-style: - poetry run isort --diff --check-only --settings-path pyproject.toml ./ poetry run black --diff --check --config pyproject.toml ./ + poetry run ruff --no-fix --config pyproject.toml ./ # poetry run darglint --verbosity 2 inseq tests - poetry run flake8 --config setup.cfg ./ # poetry run mypy --config-file pyproject.toml ./ .PHONY: fix-style fix-style: - poetry run pyupgrade --exit-zero-even-if-changed --py38-plus **/*.py - poetry run isort --settings-path pyproject.toml ./ poetry run black --config pyproject.toml ./ + poetry run ruff --config pyproject.toml ./ .PHONY: check-safety check-safety: poetry check poetry run safety check --full-report -i 51499 -i 51457 - poetry run bandit -ll --recursive inseq tests .PHONY: lint -lint: check-style check-safety +lint: fix-style check-safety #* Linting .PHONY: test diff --git a/inseq/attr/__init__.py b/inseq/attr/__init__.py index f2f897b0..15cc4595 100644 --- a/inseq/attr/__init__.py +++ b/inseq/attr/__init__.py @@ -7,7 +7,6 @@ register_step_score, ) - __all__ = [ "FeatureAttribution", "list_feature_attribution_methods", diff --git a/inseq/attr/attribution_decorators.py b/inseq/attr/attribution_decorators.py index 465f93f2..bbb58685 100644 --- a/inseq/attr/attribution_decorators.py +++ b/inseq/attr/attribution_decorators.py @@ -13,14 +13,12 @@ # limitations under the License. """ Decorators for attribution methods. """ -from typing import Any, Callable, List, Optional, Sequence - import logging from functools import wraps +from typing import Any, Callable, List, Optional, Sequence from ..data.data_utils import TensorWrapper - logger = logging.getLogger(__name__) diff --git a/inseq/attr/feat/__init__.py b/inseq/attr/feat/__init__.py index 991985a8..7b6eb497 100644 --- a/inseq/attr/feat/__init__.py +++ b/inseq/attr/feat/__init__.py @@ -13,7 +13,6 @@ SaliencyAttribution, ) - __all__ = [ "FeatureAttribution", "extract_args", diff --git a/inseq/attr/feat/attention_attribution.py b/inseq/attr/feat/attention_attribution.py index bd60dbd4..be4b1e0f 100644 --- a/inseq/attr/feat/attention_attribution.py +++ b/inseq/attr/feat/attention_attribution.py @@ -13,9 +13,8 @@ # limitations under the License. """ Attention-based feature attribution methods. """ -from typing import Any, Callable, Dict, Union - import logging +from typing import Any, Callable, Dict, Union from ...data import Batch, EncoderDecoderBatch, FeatureAttributionStepOutput from ...utils import Registry, pretty_tensor @@ -25,7 +24,6 @@ from .feature_attribution import FeatureAttribution from .ops import Attention - logger = logging.getLogger(__name__) diff --git a/inseq/attr/feat/attribution_utils.py b/inseq/attr/feat/attribution_utils.py index 2c1df4c1..00a3b76f 100644 --- a/inseq/attr/feat/attribution_utils.py +++ b/inseq/attr/feat/attribution_utils.py @@ -1,7 +1,6 @@ -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union - import logging import math +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union import torch @@ -20,7 +19,6 @@ TokenWithId, ) - if TYPE_CHECKING: from ...models import AttributionModel from .feature_attribution import FeatureAttribution @@ -163,8 +161,8 @@ def extract_args( if step_score not in STEP_SCORES_MAP: raise AttributeError( f"Step score {step_score} not found. 
Available step scores are: " - f"{', '.join([x for x in STEP_SCORES_MAP.keys()])}. Use the inseq.register_step_score" - f"function to register a custom step score." + f"{', '.join(list(STEP_SCORES_MAP.keys()))}. Use the inseq.register_step_score" + "function to register a custom step score." ) extra_step_scores_args.update( **extract_signature_args( diff --git a/inseq/attr/feat/feature_attribution.py b/inseq/attr/feat/feature_attribution.py index 8f308f94..e07d0d83 100644 --- a/inseq/attr/feat/feature_attribution.py +++ b/inseq/attr/feat/feature_attribution.py @@ -16,11 +16,10 @@ Todo: * 🟡: Allow custom arguments for model loading in the :class:`FeatureAttribution` :meth:`load` method. """ -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - import logging from abc import abstractmethod from datetime import datetime +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Sequence, Tuple, Union from torchtyping import TensorType @@ -46,7 +45,6 @@ from ..attribution_decorators import batched, set_hook, unset_hook from .attribution_utils import STEP_SCORES_MAP, check_attribute_positions, get_step_scores, tok2string - if TYPE_CHECKING: from ...models import AttributionModel @@ -465,8 +463,8 @@ def filtered_attribute_step( if step_score not in STEP_SCORES_MAP: raise AttributeError( f"Step score {step_score} not found. Available step scores are: " - f"{', '.join([x for x in STEP_SCORES_MAP.keys()])}. Use the inseq.register_step_score" - f"function to register a custom step score." + f"{', '.join(list(STEP_SCORES_MAP.keys()))}. Use the inseq.register_step_score" + "function to register a custom step score." ) step_output.step_scores[step_score] = get_step_scores( self.attribution_model, batch, target_ids, step_score, step_scores_args diff --git a/inseq/attr/feat/gradient_attribution.py b/inseq/attr/feat/gradient_attribution.py index 8066c537..42273710 100644 --- a/inseq/attr/feat/gradient_attribution.py +++ b/inseq/attr/feat/gradient_attribution.py @@ -13,9 +13,8 @@ # limitations under the License. """ Gradient-based feature attribution methods. """ -from typing import Any, Dict - import logging +from typing import Any, Dict from captum.attr import ( DeepLift, @@ -34,7 +33,6 @@ from .feature_attribution import FeatureAttribution from .ops import DiscretetizedIntegratedGradients - logger = logging.getLogger(__name__) diff --git a/inseq/attr/feat/ops/__init__.py b/inseq/attr/feat/ops/__init__.py index 93533e2d..349defaf 100644 --- a/inseq/attr/feat/ops/__init__.py +++ b/inseq/attr/feat/ops/__init__.py @@ -2,5 +2,4 @@ from .discretized_integrated_gradients import DiscretetizedIntegratedGradients from .monotonic_path_builder import MonotonicPathBuilder - __all__ = ["DiscretetizedIntegratedGradients", "MonotonicPathBuilder", "Attention"] diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index 687003ce..3b5e643c 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -12,9 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Dict, List, Protocol, Tuple, Union - import logging +from typing import Any, Dict, List, Protocol, Tuple, Union import torch from captum._utils.typing import TensorOrTupleOfTensorsGeneric @@ -24,7 +23,6 @@ from ....data import Batch, EncoderDecoderBatch from ....utils.typing import AggregatedLayerAttentionTensor, FullAttentionOutput, FullLayerAttentionTensor - logger = logging.getLogger(__name__) @@ -115,7 +113,7 @@ def _aggregate_attention_heads( if isinstance(aggregate_fn, str): if aggregate_fn not in cls.AGGREGATE_FN_OPTIONS: raise RuntimeError( - "Invalid aggregation method specified." f"Valid methods are: {cls.AGGREGATE_FN_OPTIONS.keys()}" + f"Invalid aggregation method specified.Valid methods are: {cls.AGGREGATE_FN_OPTIONS.keys()}" ) aggregate_fn = cls.AGGREGATE_FN_OPTIONS[aggregate_fn] if heads is None: @@ -201,7 +199,7 @@ def _aggregate_layers( if isinstance(aggregate_fn, str): if aggregate_fn not in cls.AGGREGATE_FN_OPTIONS: raise RuntimeError( - "Invalid aggregation method specified." f"Valid methods are: {cls.AGGREGATE_FN_OPTIONS.keys()}" + f"Invalid aggregation method specified.Valid methods are: {cls.AGGREGATE_FN_OPTIONS.keys()}" ) aggregate_fn = cls.AGGREGATE_FN_OPTIONS[aggregate_fn] if layers is None: diff --git a/inseq/attr/feat/ops/discretized_integrated_gradients.py b/inseq/attr/feat/ops/discretized_integrated_gradients.py index 21feae1e..b0cd0655 100644 --- a/inseq/attr/feat/ops/discretized_integrated_gradients.py +++ b/inseq/attr/feat/ops/discretized_integrated_gradients.py @@ -16,9 +16,8 @@ # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE # OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -from typing import Any, Callable, List, Tuple, Union - from pathlib import Path +from typing import Any, Callable, List, Tuple, Union import torch from captum._utils.common import ( diff --git a/inseq/attr/feat/ops/monotonic_path_builder.py b/inseq/attr/feat/ops/monotonic_path_builder.py index 0d5b4ff9..2119aa23 100644 --- a/inseq/attr/feat/ops/monotonic_path_builder.py +++ b/inseq/attr/feat/ops/monotonic_path_builder.py @@ -17,13 +17,12 @@ # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE # OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-from typing import Any, List, Optional, Tuple, Union - import logging import os from enum import Enum from itertools import islice from pathlib import Path +from typing import Any, List, Optional, Tuple, Union import torch from scipy.sparse import csr_matrix @@ -31,7 +30,6 @@ from ....utils import is_joblib_available, is_scikitlearn_available - if is_joblib_available(): from joblib import Parallel, delayed @@ -41,7 +39,6 @@ from ....utils import INSEQ_ARTIFACTS_CACHE, cache_results, euclidean_distance from ....utils.typing import MultiStepEmbeddingsTensor, VocabularyEmbeddingsTensor - logger = logging.getLogger(__name__) @@ -59,8 +56,10 @@ def __init__( *args: Tuple[Any], ) -> None: super().__init__( - f"Unknown strategy: {strategy}.\nAvailable strategies: " - f"{','.join([s.value for s in PathBuildingStrategies])}", + ( + f"Unknown strategy: {strategy}.\nAvailable strategies: " + f"{','.join([s.value for s in PathBuildingStrategies])}" + ), *args, ) diff --git a/inseq/commands/attribute.py b/inseq/commands/attribute.py index f6015bb4..3286774b 100644 --- a/inseq/commands/attribute.py +++ b/inseq/commands/attribute.py @@ -1,7 +1,6 @@ -from typing import List, Optional - import logging from dataclasses import dataclass, field +from typing import List, Optional from .. import list_feature_attribution_methods, load_model from ..utils import get_default_device @@ -123,9 +122,9 @@ def __post_init__(self): if self.input_texts is None: raise RuntimeError("Input texts must be specified.") if isinstance(self.input_texts, str): - self.input_texts = [t for t in self.input_texts] + self.input_texts = list(self.input_texts) if isinstance(self.generated_texts, str): - self.generated_texts = [t for t in self.generated_texts] + self.generated_texts = list(self.generated_texts) def attribute(input_texts, generated_texts, args: AttributeBaseArgs): diff --git a/inseq/commands/attribute_dataset.py b/inseq/commands/attribute_dataset.py index 4956a920..c9635ec1 100644 --- a/inseq/commands/attribute_dataset.py +++ b/inseq/commands/attribute_dataset.py @@ -1,12 +1,10 @@ -from typing import List, Optional, Tuple - from dataclasses import dataclass, field +from typing import List, Optional, Tuple from ..utils import is_datasets_available from .attribute import AttributeBaseArgs, attribute from .base import BaseCLICommand - if is_datasets_available(): from datasets import load_dataset diff --git a/inseq/commands/base.py b/inseq/commands/base.py index d0db4a18..6afb1157 100644 --- a/inseq/commands/base.py +++ b/inseq/commands/base.py @@ -1,12 +1,10 @@ -from typing import Any, Iterable, NewType, Union - import dataclasses from abc import ABC, abstractstaticmethod from argparse import Namespace +from typing import Any, Iterable, NewType, Union from ..utils import InseqArgumentParser - DataClassType = NewType("DataClassType", Any) OneOrMoreDataClasses = Union[DataClassType, Iterable[DataClassType]] diff --git a/inseq/commands/cli.py b/inseq/commands/cli.py index d5641894..7ad21222 100644 --- a/inseq/commands/cli.py +++ b/inseq/commands/cli.py @@ -1,10 +1,10 @@ """Adapted from https://github.com/huggingface/transformers/blob/main/src/transformers/commands/transformers_cli.py""" +import sys from ..utils import InseqArgumentParser from .attribute import AttributeCommand from .attribute_dataset import AttributeDatasetCommand - COMMANDS = [AttributeCommand, AttributeDatasetCommand] @@ -19,7 +19,7 @@ def main(): if not hasattr(args, "factory_method"): parser.print_help() - exit(1) + sys.exit(1) # Run command, command_args = 
args.factory_method(args) diff --git a/inseq/data/__init__.py b/inseq/data/__init__.py index b854ab99..3b191115 100644 --- a/inseq/data/__init__.py +++ b/inseq/data/__init__.py @@ -16,7 +16,6 @@ from .batch import Batch, BatchEmbedding, BatchEncoding, DecoderOnlyBatch, EncoderDecoderBatch from .viz import show_attributions - __all__ = [ "Aggregator", "AggregatorPipeline", diff --git a/inseq/data/aggregator.py b/inseq/data/aggregator.py index f3b9ca8e..34f9f42d 100644 --- a/inseq/data/aggregator.py +++ b/inseq/data/aggregator.py @@ -1,7 +1,6 @@ +from abc import ABC, abstractmethod from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union -from abc import ABC - from ..utils import abs_max, aggregate_contiguous, aggregate_token_pair, aggregate_token_sequence, identity_fn from ..utils.typing import IndexSpan, TokenWithId from .data_utils import TensorWrapper @@ -22,6 +21,7 @@ def __init__(self, default: Optional[Any] = None, *args, **kwargs): class Aggregator(ABC): @classmethod + @abstractmethod def start_aggregation_hook(cls, tensors: TensorWrapper, **kwargs): """Hook called at the start of the aggregation process. @@ -33,6 +33,7 @@ def start_aggregation_hook(cls, tensors: TensorWrapper, **kwargs): pass @classmethod + @abstractmethod def pre_aggregate_hook(cls, tensors: TensorWrapper, **kwargs): """Hook called right before the aggregation function is called. @@ -74,6 +75,7 @@ def aggregate( return aggregated @classmethod + @abstractmethod def post_aggregate_hook(cls, tensors: TensorWrapper, **kwargs): """Hook called right after the aggregation function is called. @@ -82,6 +84,7 @@ def post_aggregate_hook(cls, tensors: TensorWrapper, **kwargs): pass @classmethod + @abstractmethod def end_aggregation_hook(cls, tensors: TensorWrapper, **kwargs): """Hook called at the end of the aggregation process. @@ -126,6 +129,7 @@ def aggregate( aggregator = self._aggregator return aggregator.aggregate(self, **kwargs) + @abstractmethod def __post_init__(self): pass diff --git a/inseq/data/attribution.py b/inseq/data/attribution.py index 76cab8cb..412f25cb 100644 --- a/inseq/data/attribution.py +++ b/inseq/data/attribution.py @@ -1,9 +1,8 @@ -from typing import Any, Dict, List, Optional, Type, Union - import logging from copy import deepcopy from dataclasses import dataclass, field from pathlib import Path +from typing import Any, Dict, List, Optional, Type, Union import torch @@ -36,7 +35,6 @@ from .batch import Batch, BatchEncoding from .data_utils import TensorWrapper - FeatureAttributionInput = Union[TextInput, BatchEncoding, Batch] DEFAULT_ATTRIBUTION_AGGREGATE_DICT = { diff --git a/inseq/data/batch.py b/inseq/data/batch.py index d1f048c7..5c494bd1 100644 --- a/inseq/data/batch.py +++ b/inseq/data/batch.py @@ -1,6 +1,5 @@ -from typing import List, Optional, Tuple, Union - from dataclasses import dataclass +from typing import List, Optional, Tuple, Union from ..utils.typing import EmbeddingsTensor, ExpandedTargetIdsTensor, IdsTensor, OneOrMoreTokenSequences from .data_utils import TensorWrapper diff --git a/inseq/data/data_utils.py b/inseq/data/data_utils.py index 5cf79c07..482d0f9b 100644 --- a/inseq/data/data_utils.py +++ b/inseq/data/data_utils.py @@ -33,7 +33,7 @@ def _slice_batch(attr, subscript): return attr[subscript] if len(attr.shape) >= 2: return attr[subscript, ...] 
- elif isinstance(attr, TensorWrapper) or isinstance(attr, list): + elif isinstance(attr, (TensorWrapper, list)): return attr[subscript] elif isinstance(attr, dict): return {key: TensorWrapper._slice_batch(val, subscript) for key, val in attr.items()} @@ -64,7 +64,7 @@ def _select_active(attr, mask): @staticmethod def _to(attr, device: str): - if isinstance(attr, torch.Tensor) or isinstance(attr, TensorWrapper): + if isinstance(attr, (torch.Tensor, TensorWrapper)): return attr.to(device) elif isinstance(attr, dict): return {key: TensorWrapper._to(val, device) for key, val in attr.items()} @@ -73,7 +73,7 @@ def _to(attr, device: str): @staticmethod def _detach(attr): - if isinstance(attr, torch.Tensor) or isinstance(attr, TensorWrapper): + if isinstance(attr, (torch.Tensor, TensorWrapper)): return attr.detach() elif isinstance(attr, dict): return {key: TensorWrapper._detach(val) for key, val in attr.items()} @@ -82,7 +82,7 @@ def _detach(attr): @staticmethod def _numpy(attr): - if isinstance(attr, torch.Tensor) or isinstance(attr, TensorWrapper): + if isinstance(attr, (torch.Tensor, TensorWrapper)): np_array = attr.numpy() if isinstance(np_array, np.ndarray): return np.ascontiguousarray(np_array, dtype=np_array.dtype) @@ -162,7 +162,7 @@ def clone(self): out_params = {} for field in fields(self.__class__): attr = getattr(self, field.name) - if isinstance(attr, torch.Tensor) or isinstance(attr, TensorWrapper): + if isinstance(attr, (torch.Tensor, TensorWrapper)): out_params[field.name] = attr.clone() elif attr is not None: out_params[field.name] = deepcopy(attr) diff --git a/inseq/data/viz.py b/inseq/data/viz.py index e77736e8..0c2817a0 100644 --- a/inseq/data/viz.py +++ b/inseq/data/viz.py @@ -16,10 +16,9 @@ # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE # OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-from typing import Dict, List, Literal, Optional, Tuple, Union - import random import string +from typing import Dict, List, Literal, Optional, Tuple, Union import numpy as np from matplotlib.colors import Colormap @@ -214,10 +213,11 @@ def get_saliency_heatmap_html( threshold = step_scores_threshold else: threshold = step_scores_threshold.get(step_score_name, 0.5) - style = lambda val: abs(val) >= threshold + style = lambda val, limit: abs(val) >= limit for col_index in range(scores.shape[1]): score = round(float(step_score_values[col_index]), 3) - out += f'{"" if style(score) else ""}{score}{"" if style(score) else ""}' + is_bold = style(score, threshold) + out += f'{"" if is_bold else ""}{score}{"" if is_bold else ""}' out += "" saliency_heatmap_markup = saliency_heatmap_html.format(uuid=uuid, content=out, label=label) plot_uuid = "".join(random.choices(string.ascii_lowercase, k=20)) @@ -262,10 +262,12 @@ def get_saliency_heatmap_rich( threshold = step_scores_threshold else: threshold = step_scores_threshold.get(step_score_name, 0.5) - style = lambda val: "bold" if abs(val) >= threshold else "" + style = lambda val, limit: "bold" if abs(val) >= limit else "" score_row = [Text(step_score_name, style="bold")] for score in step_score_values: - score_row.append(Text(f"{score:.2f}", justify="center", style=style(round(float(score), 2)))) + score_row.append( + Text(f"{score:.2f}", justify="center", style=style(round(float(score), 2), threshold)) + ) table.add_row(*score_row, end_section=True) return table diff --git a/inseq/models/__init__.py b/inseq/models/__init__.py index ef314cff..b2c34ae3 100644 --- a/inseq/models/__init__.py +++ b/inseq/models/__init__.py @@ -1,6 +1,5 @@ -from typing import List, Optional, Union - import logging +from typing import List, Optional, Union from rich.status import Status @@ -11,7 +10,6 @@ from .encoder_decoder import EncoderDecoderAttributionModel from .huggingface_model import HuggingfaceDecoderOnlyModel, HuggingfaceEncoderDecoderModel, HuggingfaceModel - logger = logging.getLogger(__name__) FRAMEWORKS_MAP = { diff --git a/inseq/models/attribution_model.py b/inseq/models/attribution_model.py index 9c6e0cc5..89401d79 100644 --- a/inseq/models/attribution_model.py +++ b/inseq/models/attribution_model.py @@ -1,7 +1,6 @@ -from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, Union - import logging from abc import ABC, abstractmethod +from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, Union import torch @@ -31,7 +30,6 @@ ) from .model_decorators import unhooked - ModelOutput = TypeVar("ModelOutput") @@ -39,7 +37,6 @@ class AttributionModel(ABC, torch.nn.Module): - # Default arguments for custom attributed functions # in the AttributionModel.forward method. _DEFAULT_ATTRIBUTED_FN_ARGS = [ @@ -133,7 +130,7 @@ def get_attributed_fn( if isinstance(attributed_fn, str): if attributed_fn not in STEP_SCORES_MAP: raise ValueError( - f"Unknown function: {attributed_fn}." "Register custom functions with inseq.register_step_score" + f"Unknown function: {attributed_fn}. 
Register custom functions with inseq.register_step_score" ) attributed_fn = STEP_SCORES_MAP[attributed_fn] return attributed_fn diff --git a/inseq/models/decoder_only.py b/inseq/models/decoder_only.py index 4cb28cf9..e9bc1783 100644 --- a/inseq/models/decoder_only.py +++ b/inseq/models/decoder_only.py @@ -1,6 +1,5 @@ -from typing import Any, Callable, Dict, List, Optional, Tuple, Union - import logging +from typing import Any, Callable, Dict, List, Optional, Tuple, Union from ..attr.feat import join_token_ids from ..data import ( @@ -26,7 +25,6 @@ ) from .attribution_model import AttributionModel, ModelOutput - logger = logging.getLogger(__name__) @@ -41,7 +39,7 @@ def prepare_inputs_for_attribution( if isinstance(inputs, Batch): batch = inputs else: - if isinstance(inputs, str) or isinstance(inputs, list): + if isinstance(inputs, (str, list)): # Decoder-only model do not tokenize as targets, # since a single tokenizer is available. encodings: BatchEncoding = self.encode( @@ -53,7 +51,7 @@ def prepare_inputs_for_attribution( encodings = inputs else: raise ValueError( - f"targets must be either a string, a list of strings, a BatchEncoding or a Batch, " + "targets must be either a string, a list of strings, a BatchEncoding or a Batch, " f"not {type(inputs)}" ) baseline_embeds = self.embed(encodings.baseline_ids) diff --git a/inseq/models/encoder_decoder.py b/inseq/models/encoder_decoder.py index a1677d8d..82fcc066 100644 --- a/inseq/models/encoder_decoder.py +++ b/inseq/models/encoder_decoder.py @@ -1,6 +1,5 @@ -from typing import Any, Callable, Dict, List, Optional, Tuple, Union - import logging +from typing import Any, Callable, Dict, List, Optional, Tuple, Union from ..attr.feat import join_token_ids from ..data import ( @@ -26,7 +25,6 @@ ) from .attribution_model import AttributionModel, ModelOutput - logger = logging.getLogger(__name__) @@ -67,7 +65,7 @@ def prepare_inputs_for_attribution( if isinstance(sources, Batch): source_batch = sources else: - if isinstance(sources, str) or isinstance(sources, list): + if isinstance(sources, (str, list)): source_encodings: BatchEncoding = self.encode( sources, return_baseline=True, include_eos_baseline=include_eos_baseline ) @@ -75,7 +73,7 @@ def prepare_inputs_for_attribution( source_encodings = sources else: raise ValueError( - f"sources must be either a string, a list of strings, a BatchEncoding or a Batch, " + "sources must be either a string, a list of strings, a BatchEncoding or a Batch, " f"not {type(sources)}" ) # Even when we are performing layer attribution, we might need the embeddings @@ -89,7 +87,7 @@ def prepare_inputs_for_attribution( if isinstance(targets, Batch): target_batch = targets else: - if isinstance(targets, str) or isinstance(targets, list): + if isinstance(targets, (str, list)): target_encodings: BatchEncoding = self.encode( targets, as_targets=True, @@ -100,7 +98,7 @@ def prepare_inputs_for_attribution( target_encodings = targets else: raise ValueError( - f"targets must be either a string, a list of strings, a BatchEncoding or a Batch, " + "targets must be either a string, a list of strings, a BatchEncoding or a Batch, " f"not {type(targets)}" ) baseline_embeds = self.embed(target_encodings.baseline_ids, as_targets=True) diff --git a/inseq/models/huggingface_model.py b/inseq/models/huggingface_model.py index 25a585ad..56c1adc2 100644 --- a/inseq/models/huggingface_model.py +++ b/inseq/models/huggingface_model.py @@ -1,8 +1,7 @@ """ HuggingFace Seq2seq model """ -from typing import Dict, List, NoReturn, Optional, Tuple, 
Union - import logging from abc import abstractmethod +from typing import Dict, List, NoReturn, Optional, Tuple, Union import torch from torch import long @@ -31,7 +30,6 @@ from .encoder_decoder import EncoderDecoderAttributionModel from .model_decorators import unhooked - logger = logging.getLogger(__name__) logging.getLogger("urllib3").setLevel(logging.WARNING) @@ -92,7 +90,6 @@ def __init__( if isinstance(model, PreTrainedModel): self.model = model else: - if "output_attentions" not in model_kwargs: model_kwargs["output_attentions"] = True diff --git a/inseq/models/model_decorators.py b/inseq/models/model_decorators.py index 4995a4bb..8141f432 100644 --- a/inseq/models/model_decorators.py +++ b/inseq/models/model_decorators.py @@ -1,6 +1,5 @@ -from typing import Any, Callable - from functools import wraps +from typing import Any, Callable def unhooked(f: Callable[..., Any]) -> Callable[..., Any]: diff --git a/inseq/utils/__init__.py b/inseq/utils/__init__.py index 606d6226..7f56332d 100644 --- a/inseq/utils/__init__.py +++ b/inseq/utils/__init__.py @@ -61,7 +61,6 @@ sum_normalize_attributions, ) - __all__ = [ "LengthMismatchError", "MissingAttributionMethodError", diff --git a/inseq/utils/argparse.py b/inseq/utils/argparse.py index 140f47d9..54489d24 100644 --- a/inseq/utils/argparse.py +++ b/inseq/utils/argparse.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Dict, Iterable, NewType, Optional, Tuple, Union, get_type_hints - import dataclasses import json import sys @@ -22,7 +20,7 @@ from enum import Enum from inspect import isclass from pathlib import Path - +from typing import Any, Dict, Iterable, NewType, Optional, Tuple, Union, get_type_hints DataClass = NewType("DataClass", Any) DataClassType = NewType("DataClassType", Any) @@ -154,12 +152,12 @@ def _add_dataclass_arguments(self, dtype: DataClassType): try: type_hints: Dict[str, type] = get_type_hints(dtype) - except NameError: + except NameError as err: raise RuntimeError( f"Type resolution failed for f{dtype}. 
Try declaring the class in global scope or " - f"removing line of `from __future__ import annotations` which opts in Postponed " - f"Evaluation of Annotations (PEP 563)" - ) + "removing line of `from __future__ import annotations` which opts in Postponed " + "Evaluation of Annotations (PEP 563)" + ) from err for field in dataclasses.fields(dtype): if not field.init: diff --git a/inseq/utils/cache.py b/inseq/utils/cache.py index d45a3dfc..cab7c39a 100644 --- a/inseq/utils/cache.py +++ b/inseq/utils/cache.py @@ -1,11 +1,9 @@ -from typing import Any, Callable - import logging import os import pickle from functools import wraps from pathlib import Path - +from typing import Any, Callable logger = logging.getLogger(__name__) diff --git a/inseq/utils/errors.py b/inseq/utils/errors.py index 1288d9a2..19a4f4e4 100644 --- a/inseq/utils/errors.py +++ b/inseq/utils/errors.py @@ -14,7 +14,7 @@ class UnknownAttributionMethodError(Exception): """Raised when an attribution method is not valid""" UNKNOWN_ATTRIBUTION_METHOD_MSG = ( - "Unknown attribution method: {attribution_method}.\n" "Available methods: {available_methods}" + "Unknown attribution method: {attribution_method}.\nAvailable methods: {available_methods}" ) def __init__( diff --git a/inseq/utils/import_utils.py b/inseq/utils/import_utils.py index 1a96ae67..cbd03420 100644 --- a/inseq/utils/import_utils.py +++ b/inseq/utils/import_utils.py @@ -1,6 +1,5 @@ from importlib.util import find_spec - _ipywidgets_available = find_spec("ipywidgets") is not None _scikitlearn_available = find_spec("sklearn") is not None _transformers_available = find_spec("transformers") is not None diff --git a/inseq/utils/misc.py b/inseq/utils/misc.py index 7f06f386..9b9326bd 100644 --- a/inseq/utils/misc.py +++ b/inseq/utils/misc.py @@ -1,5 +1,3 @@ -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - import functools import gzip import io @@ -14,6 +12,7 @@ from inspect import signature from itertools import dropwhile from os import PathLike, fsync +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union from numpy import asarray, frombuffer from torch import Tensor @@ -21,7 +20,6 @@ from .errors import LengthMismatchError from .typing import TextInput, TokenWithId - logger = logging.getLogger(__name__) @@ -97,7 +95,7 @@ def pretty_dict(d: Dict[str, Any], lpad: int = 4) -> str: out_txt = "{\n" for k, v in d.items(): out_txt += f"{' ' * lpad}{k}: " - if isinstance(v, list) or isinstance(v, tuple): + if isinstance(v, (list, tuple)): out_txt += pretty_list(v, lpad + 4) elif isinstance(v, Tensor): out_txt += pretty_tensor(v, lpad + 4) @@ -209,8 +207,9 @@ def format_input_texts( reference_texts = [ref_texts] if isinstance(ref_texts, str) else ref_texts if reference_texts and len(texts) != len(reference_texts): raise LengthMismatchError( - "Length mismatch for texts and reference_texts." - "Input length: {}, reference length: {} ".format(len(texts), len(reference_texts)) + "Length mismatch for texts and reference_texts.Input length: {}, reference length: {} ".format( + len(texts), len(reference_texts) + ) ) return texts, reference_texts @@ -337,12 +336,13 @@ def save_to_file_wrapper( fh = fp try: if compression and "b" not in getattr(fh, "mode", "b?") and not isinstance(txt, str): - raise IOError("If compression is enabled, the file must be opened in binary mode.") + raise OSError("If compression is enabled, the file must be opened in binary mode.") try: fh.write(txt) except TypeError as err: err.args = ( - err.args[0] + ". 
A possible reason is that the file is not opened in binary mode; " + err.args[0] + + ". A possible reason is that the file is not opened in binary mode; " "be sure to set file mode to something like 'wb'.", ) raise @@ -352,7 +352,7 @@ def save_to_file_wrapper( try: if fh.fileno() is not None: fsync(fh.fileno()) - except (ValueError,): + except ValueError: pass if isinstance(fp, str): fh.close() @@ -369,7 +369,7 @@ def bin_str_to_ndarray(data, order, shape, dtype): assert order in [ None, "C", - ], "specifying different memory order is not (yet) supported " "for binary numpy format (got order = {})".format( + ], "specifying different memory order is not (yet) supported for binary numpy format (got order = {})".format( order ) if data.startswith("b64.gz:"): @@ -410,13 +410,13 @@ def get_cls_from_instance_type(mod, name, cls_lookup_map): if mod is None: try: curr_class = getattr((__import__("__main__")), name) - except (ImportError, AttributeError): + except (ImportError, AttributeError) as err: if name not in cls_lookup_map: raise ImportError( f"class {name} seems to have been exported from the main file, which means " "it has no module/import path set; you need to provide loads argument" f"`cls_lookup_map={{'{name}': Class}}` to locate the class" - ) + ) from err curr_class = cls_lookup_map[name] else: imp_err = None diff --git a/inseq/utils/registry.py b/inseq/utils/registry.py index 57468987..2ab0c6b0 100644 --- a/inseq/utils/registry.py +++ b/inseq/utils/registry.py @@ -1,9 +1,7 @@ from __future__ import annotations -from typing import TypeVar - from abc import ABC - +from typing import TypeVar R = TypeVar("R", bound="Registry") @@ -36,4 +34,4 @@ def available_classes(cls: type[R]) -> dict[str, type[R]]: def get_available_methods(cls: type[Registry]) -> list[str]: - return [n for n in cls.available_classes().keys()] + return list(cls.available_classes().keys()) diff --git a/inseq/utils/serialization.py b/inseq/utils/serialization.py index 4653446a..c0d78f8d 100644 --- a/inseq/utils/serialization.py +++ b/inseq/utils/serialization.py @@ -29,12 +29,11 @@ # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -from typing import Any, Callable, Dict, List, Optional, TypeVar, Union - import json from collections import OrderedDict from json import JSONEncoder from os import PathLike +from typing import Any, Callable, Dict, List, Optional, TypeVar, Union from numpy import generic, ndarray @@ -50,7 +49,6 @@ scalar_to_numpy, ) - EncodableObject = TypeVar("EncodableObject") DecodableObject = TypeVar("DecodableObject") @@ -60,7 +58,7 @@ def class_instance_encode(obj: EncodableObject, use_primitives: bool = True, **k Encodes a class instance to json. Note that it can only be recovered if the environment allows the class to be imported in the same way. 
""" - if isinstance(obj, list) or isinstance(obj, dict): + if isinstance(obj, (list, dict)): return obj if hasattr(obj, "__class__") and hasattr(obj, "__dict__"): if not hasattr(obj, "__new__"): @@ -69,11 +67,11 @@ def class_instance_encode(obj: EncodableObject, use_primitives: bool = True, **k raise TypeError(f"instance '{obj}' of class '{obj.__class__}' cannot be encoded, it is a function.") try: obj.__new__(obj.__class__) - except TypeError: + except TypeError as err: raise TypeError( f"instance '{obj}' of class '{obj.__class__}' cannot be encoded, perhaps because its" " __new__ method cannot be called because it requires extra parameters" - ) + ) from err mod = get_module_name_from_object(obj) name = obj.__class__.__name__ if hasattr(obj, "__json_encode__"): @@ -328,8 +326,10 @@ def class_instance_hook(dct: Any, cls_lookup_map: Optional[Dict[str, type]] = No curr_class = get_cls_from_instance_type(mod, name, cls_lookup_map=cls_lookup_map) try: obj = curr_class.__new__(curr_class) - except TypeError: - raise TypeError(f"problem while decoding instance of '{name}'; this instance has a special __new__ method") + except TypeError as err: + raise TypeError( + f"problem while decoding instance of '{name}'; this instance has a special __new__ method" + ) from err if hasattr(obj, "__json_decode__"): properties = {} if "attributes" in dct: @@ -437,16 +437,16 @@ def json_advanced_load( The loaded object. """ try: - if isinstance(fp, str) or isinstance(fp, bytes) or isinstance(fp, PathLike): + if isinstance(fp, (PathLike, bytes, str)): with open(fp, "rb" if decompression else "r") as fh: string = fh.read() else: string = fp.read() - except UnicodeDecodeError: + except UnicodeDecodeError as err: raise Exception( "There was a problem decoding the file content. A possible reason is that the file is not " "opened in binary mode; be sure to set file mode to something like 'rb'." - ) + ) from err return json_advanced_loads( string=string, ordered=ordered, diff --git a/inseq/utils/torch_utils.py b/inseq/utils/torch_utils.py index 86f0b9eb..e0c51baa 100644 --- a/inseq/utils/torch_utils.py +++ b/inseq/utils/torch_utils.py @@ -1,6 +1,5 @@ -from typing import TYPE_CHECKING, Any, Callable, List, Optional, Sequence, Tuple, Union - import logging +from typing import TYPE_CHECKING, Any, Callable, List, Optional, Sequence, Tuple, Union import torch from torch.backends.cuda import is_built as is_cuda_built @@ -17,7 +16,6 @@ TokenSequenceAttributionTensor, ) - if TYPE_CHECKING: from ..models import AttributionModel diff --git a/inseq/utils/typing.py b/inseq/utils/typing.py index 5ac9a19e..5c87adc2 100644 --- a/inseq/utils/typing.py +++ b/inseq/utils/typing.py @@ -1,12 +1,10 @@ -from typing import Optional, Sequence, Tuple, Union - from dataclasses import dataclass +from typing import Optional, Sequence, Tuple, Union from torch import float32, long from torchtyping import TensorType from transformers import PreTrainedModel - TextInput = Union[str, Sequence[str]] diff --git a/poetry.lock b/poetry.lock index c9f2f736..90ecf1b4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -82,26 +82,6 @@ files = [ {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, ] -[[package]] -name = "astroid" -version = "2.13.2" -description = "An abstract syntax tree for Python with inference support." 
-category = "dev" -optional = false -python-versions = ">=3.7.2" -files = [ - {file = "astroid-2.13.2-py3-none-any.whl", hash = "sha256:8f6a8d40c4ad161d6fc419545ae4b2f275ed86d1c989c97825772120842ee0d2"}, - {file = "astroid-2.13.2.tar.gz", hash = "sha256:3bc7834720e1a24ca797fd785d77efb14f7a28ee8e635ef040b6e2d80ccb3303"}, -] - -[package.dependencies] -lazy-object-proxy = ">=1.4.0" -typing-extensions = ">=4.0.0" -wrapt = [ - {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, - {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, -] - [[package]] name = "asttokens" version = "2.2.1" @@ -785,7 +765,7 @@ name = "dill" version = "0.3.6" description = "serialize all of python" category = "main" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, @@ -897,23 +877,6 @@ files = [ docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] -[[package]] -name = "flake8" -version = "6.0.0" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, - {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.10.0,<2.11.0" -pyflakes = ">=3.0.0,<3.1.0" - [[package]] name = "fonttools" version = "4.38.0" @@ -942,14 +905,14 @@ woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] [[package]] name = "fsspec" -version = "2022.11.0" +version = "2023.1.0" description = "File-system specification" category = "main" optional = true python-versions = ">=3.7" files = [ - {file = "fsspec-2022.11.0-py3-none-any.whl", hash = "sha256:d6e462003e3dcdcb8c7aa84c73a228f8227e72453cd22570e2363e8844edfe7b"}, - {file = "fsspec-2022.11.0.tar.gz", hash = "sha256:259d5fd5c8e756ff2ea72f42e7613c32667dc2049a4ac3d84364a7ca034acb8b"}, + {file = "fsspec-2023.1.0-py3-none-any.whl", hash = "sha256:b833e2e541e9e8cde0ab549414187871243177feb3d344f9d27b25a93f5d8139"}, + {file = "fsspec-2023.1.0.tar.gz", hash = "sha256:fbae7f20ff801eb5f7d0bedf81f25c787c0dfac5e982d98fa3884a9cde2b5411"}, ] [package.dependencies] @@ -1205,27 +1168,6 @@ widgetsnbextension = ">=4.0,<5.0" [package.extras] test = ["jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] -[[package]] -name = "isort" -version = "5.11.4" -description = "A Python utility / library to sort Python imports." 
-category = "dev" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, - {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, -] - -[package.dependencies] -colorama = {version = ">=0.4.3,<0.5.0", optional = true, markers = "extra == \"colors\""} - -[package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] - [[package]] name = "jedi" version = "0.18.2" @@ -1412,52 +1354,6 @@ files = [ {file = "kiwisolver-1.4.4.tar.gz", hash = "sha256:d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955"}, ] -[[package]] -name = "lazy-object-proxy" -version = "1.9.0" -description = "A fast and thorough lazy object proxy." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, -] - [[package]] name = "markdown" version = "3.4.1" @@ -1613,18 +1509,6 @@ files = [ [package.dependencies] traitlets = "*" -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe 
checker, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - [[package]] name = "multidict" version = "6.0.4" @@ -2033,6 +1917,13 @@ files = [ {file = "Pillow-9.4.0-1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858"}, {file = "Pillow-9.4.0-1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab"}, {file = "Pillow-9.4.0-1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9"}, + {file = "Pillow-9.4.0-2-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:9d9a62576b68cd90f7075876f4e8444487db5eeea0e4df3ba298ee38a8d067b0"}, + {file = "Pillow-9.4.0-2-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:87708d78a14d56a990fbf4f9cb350b7d89ee8988705e58e39bdf4d82c149210f"}, + {file = "Pillow-9.4.0-2-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:8a2b5874d17e72dfb80d917213abd55d7e1ed2479f38f001f264f7ce7bae757c"}, + {file = "Pillow-9.4.0-2-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:83125753a60cfc8c412de5896d10a0a405e0bd88d0470ad82e0869ddf0cb3848"}, + {file = "Pillow-9.4.0-2-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9e5f94742033898bfe84c93c831a6f552bb629448d4072dd312306bab3bd96f1"}, + {file = "Pillow-9.4.0-2-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:013016af6b3a12a2f40b704677f8b51f72cb007dac785a9933d5c86a72a7fe33"}, + {file = "Pillow-9.4.0-2-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:99d92d148dd03fd19d16175b6d355cc1b01faf80dae93c6c3eb4163709edc0a9"}, {file = "Pillow-9.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157"}, {file = "Pillow-9.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47"}, {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343"}, @@ -2311,18 +2202,6 @@ files = [ [package.dependencies] numpy = ">=1.16.6" -[[package]] -name = "pycodestyle" -version = "2.10.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, - {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, -] - [[package]] name = "pycparser" version = "2.21" @@ -2335,18 +2214,6 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] -[[package]] -name = "pyflakes" -version = "3.0.1" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, - {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, -] - [[package]] name = "pygments" version = "2.14.0" 
@@ -2362,36 +2229,6 @@ files = [ [package.extras] plugins = ["importlib-metadata"] -[[package]] -name = "pylint" -version = "2.15.10" -description = "python code static checker" -category = "dev" -optional = false -python-versions = ">=3.7.2" -files = [ - {file = "pylint-2.15.10-py3-none-any.whl", hash = "sha256:9df0d07e8948a1c3ffa3b6e2d7e6e63d9fb457c5da5b961ed63106594780cc7e"}, - {file = "pylint-2.15.10.tar.gz", hash = "sha256:b3dc5ef7d33858f297ac0d06cc73862f01e4f2e74025ec3eff347ce0bc60baf5"}, -] - -[package.dependencies] -astroid = ">=2.12.13,<=2.14.0-dev0" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = [ - {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, -] -isort = ">=4.2.5,<6" -mccabe = ">=0.6,<0.8" -platformdirs = ">=2.2.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -tomlkit = ">=0.10.1" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} - -[package.extras] -spelling = ["pyenchant (>=3.2,<4.0)"] -testutils = ["gitpython (>3)"] - [[package]] name = "pyparsing" version = "3.0.9" @@ -2477,21 +2314,6 @@ files = [ {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, ] -[[package]] -name = "pyupgrade" -version = "2.38.4" -description = "A tool to automatically upgrade syntax for newer versions." -category = "dev" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyupgrade-2.38.4-py2.py3-none-any.whl", hash = "sha256:944ff993c396ddc2b9012eb3de4cda138eb4c149b22c6c560d4c8bfd0e180982"}, - {file = "pyupgrade-2.38.4.tar.gz", hash = "sha256:1eb43a49f416752929741ba4d706bf3f33593d3cac9bdc217fc1ef55c047c1f4"}, -] - -[package.dependencies] -tokenize-rt = "<5" - [[package]] name = "pywin32" version = "305" @@ -2895,6 +2717,32 @@ files = [ {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, ] +[[package]] +name = "ruff" +version = "0.0.227" +description = "An extremely fast Python linter, written in Rust." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.0.227-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:2571a607e099e8fdd9d5aaf7450942a722b0de70215129c7240c3512dd73a2c7"}, + {file = "ruff-0.0.227-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:329a19f3fffbd4ac58f1e601c299f9970561b40c1c83bdd85c7233c04dd132b9"}, + {file = "ruff-0.0.227-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2258a48901ac81cbefb971405c9baed7765fc6be3e4a939cdf4161a2e45e7f"}, + {file = "ruff-0.0.227-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cde152a5b29ed6ea33920d4a1e0bd57dd4ff854fcc4ec836e8d60fb42a5146ac"}, + {file = "ruff-0.0.227-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:091ce41b94e1c774dc6c89d10314fdba2f3646141ce9c7afea42f2943fc900da"}, + {file = "ruff-0.0.227-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:858869f6b1562472ded90c7de9d9cb9a1dac042200f2aba3d6fc42e92644a453"}, + {file = "ruff-0.0.227-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8494e55a7149b6ece0a63b7b3abb22babc0376580a2a03ee90b854961ed053b9"}, + {file = "ruff-0.0.227-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c22ac3899ce27e336a97520d978ce3a63d6a33f75c12fdd0aea0bb944fe279a"}, + {file = "ruff-0.0.227-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e117190dae6c3a7fe6386e8b6d18fe4e207c8abb3675f6b8d21a488ebc6018f"}, + {file = "ruff-0.0.227-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2926a63c469da3a647195b3874a398fbcc1b703d8cea2e961747e5884b93c4b2"}, + {file = "ruff-0.0.227-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:79f9fe7d512e4fef9b448509cbdfe9ee67db17a0abd3f84c1e5d6efe8f544327"}, + {file = "ruff-0.0.227-py3-none-musllinux_1_2_i686.whl", hash = "sha256:956b9a806da5f8c1e5b54c933d6a1782a892849e401cfaa901d0e1f0a8542683"}, + {file = "ruff-0.0.227-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:f08ec3a4d9ecf70baee3922900d19098a56bf707410703411ea40e0e0db25fff"}, + {file = "ruff-0.0.227-py3-none-win32.whl", hash = "sha256:fbb2bb2a16041b192f10fee6ddab36f0ca847f0d33ca6a49128004ac1ba6d1ca"}, + {file = "ruff-0.0.227-py3-none-win_amd64.whl", hash = "sha256:59ee7ab9e610d43220ee12bc5f8845dddff0f857a48b08f630c87d337abcd5f0"}, + {file = "ruff-0.0.227.tar.gz", hash = "sha256:1da5eca99f4b35b8329391f0d5802b379c2672525a2526a2e0b64083e52adc03"}, +] + [[package]] name = "safety" version = "2.3.4" @@ -3377,18 +3225,6 @@ files = [ {file = "threadpoolctl-3.1.0.tar.gz", hash = "sha256:a335baacfaa4400ae1f0d8e3a58d6674d2f8828e3716bb2802c44955ad391380"}, ] -[[package]] -name = "tokenize-rt" -version = "4.2.1" -description = "A wrapper around the stdlib `tokenize` which roundtrips." 
-category = "dev" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "tokenize_rt-4.2.1-py2.py3-none-any.whl", hash = "sha256:08a27fa032a81cf45e8858d0ac706004fcd523e8463415ddf1442be38e204ea8"}, - {file = "tokenize_rt-4.2.1.tar.gz", hash = "sha256:0d4f69026fed520f8a1e0103aa36c406ef4661417f20ca643f913e33531b3b94"}, -] - [[package]] name = "tokenizers" version = "0.13.2" @@ -3405,11 +3241,14 @@ files = [ {file = "tokenizers-0.13.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47ef745dbf9f49281e900e9e72915356d69de3a4e4d8a475bda26bfdb5047736"}, {file = "tokenizers-0.13.2-cp310-cp310-win32.whl", hash = "sha256:96cedf83864bcc15a3ffd088a6f81a8a8f55b8b188eabd7a7f2a4469477036df"}, {file = "tokenizers-0.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:eda77de40a0262690c666134baf19ec5c4f5b8bde213055911d9f5a718c506e1"}, + {file = "tokenizers-0.13.2-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:9eee037bb5aa14daeb56b4c39956164b2bebbe6ab4ca7779d88aa16b79bd4e17"}, + {file = "tokenizers-0.13.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d1b079c4c9332048fec4cb9c2055c2373c74fbb336716a5524c9a720206d787e"}, {file = "tokenizers-0.13.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a689654fc745135cce4eea3b15e29c372c3e0b01717c6978b563de5c38af9811"}, {file = "tokenizers-0.13.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3606528c07cda0566cff6cbfbda2b167f923661be595feac95701ffcdcbdbb21"}, {file = "tokenizers-0.13.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:41291d0160946084cbd53c8ec3d029df3dc2af2673d46b25ff1a7f31a9d55d51"}, {file = "tokenizers-0.13.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7892325f9ca1cc5fca0333d5bfd96a19044ce9b092ce2df625652109a3de16b8"}, {file = "tokenizers-0.13.2-cp311-cp311-win32.whl", hash = "sha256:93714958d4ebe5362d3de7a6bd73dc86c36b5af5941ebef6c325ac900fa58865"}, + {file = "tokenizers-0.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:fa7ef7ee380b1f49211bbcfac8a006b1a3fa2fa4c7f4ee134ae384eb4ea5e453"}, {file = "tokenizers-0.13.2-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:da521bfa94df6a08a6254bb8214ea04854bb9044d61063ae2529361688b5440a"}, {file = "tokenizers-0.13.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a739d4d973d422e1073989769723f3b6ad8b11e59e635a63de99aea4b2208188"}, {file = "tokenizers-0.13.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cac01fc0b868e4d0a3aa7c5c53396da0a0a63136e81475d32fcf5c348fcb2866"}, @@ -3418,6 +3257,7 @@ files = [ {file = "tokenizers-0.13.2-cp37-cp37m-win32.whl", hash = "sha256:a537061ee18ba104b7f3daa735060c39db3a22c8a9595845c55b6c01d36c5e87"}, {file = "tokenizers-0.13.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c82fb87b1cbfa984d8f05b2b3c3c73e428b216c1d4f0e286d0a3b27f521b32eb"}, {file = "tokenizers-0.13.2-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:ce298605a833ac7f81b8062d3102a42dcd9fa890493e8f756112c346339fe5c5"}, + {file = "tokenizers-0.13.2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:f44d59bafe3d61e8a56b9e0a963075187c0f0091023120b13fbe37a87936f171"}, {file = "tokenizers-0.13.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a51b93932daba12ed07060935978a6779593a59709deab04a0d10e6fd5c29e60"}, {file = "tokenizers-0.13.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6969e5ea7ccb909ce7d6d4dfd009115dc72799b0362a2ea353267168667408c4"}, 
{file = "tokenizers-0.13.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:92f040c4d938ea64683526b45dfc81c580e3b35aaebe847e7eec374961231734"}, @@ -3464,18 +3304,6 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "tomlkit" -version = "0.11.6" -description = "Style preserving TOML library" -category = "dev" -optional = false -python-versions = ">=3.6" -files = [ - {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"}, - {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"}, -] - [[package]] name = "torch" version = "1.13.1" @@ -3765,80 +3593,6 @@ files = [ {file = "widgetsnbextension-4.0.5.tar.gz", hash = "sha256:003f716d930d385be3fd9de42dd9bf008e30053f73bddde235d14fbeaeff19af"}, ] -[[package]] -name = "wrapt" -version = "1.14.1" -description = "Module for decorators, wrappers and monkey patching." -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -files = [ - {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, - {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, - {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, - {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, - {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, - {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, - {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, - {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, - {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, - {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, - {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, - {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, - {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, - {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, - {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, - {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, - {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, - {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, - {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, - {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, - {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, - {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, - {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, - {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, - {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, - {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, - {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, - {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, - {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, - {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = 
"sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, - {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, - {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, -] - [[package]] name = "xxhash" version = "3.2.0" @@ -4059,4 +3813,4 @@ sklearn = ["scikit-learn", "joblib"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<3.12" -content-hash = "1ee1c300a2ed497d53e839292edb3b2581a9cd4f61365838e41f3968108d1164" +content-hash = "4cb2321ea8dddf031061bc4c37a04b2262100817bbf95f199c2800bcffeca06b" diff --git a/pyproject.toml b/pyproject.toml index 9b7bec25..514f1247 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,13 +87,10 @@ bandit = "^1.7.4" safety = "^2.2.0" black = "^22.6.0" darglint = "^1.8.1" -flake8 = "^6.0.0" -isort = {extras = ["colors"], version = "^5.10.1"} pre-commit = "^2.19.0" -pylint = "^2.15.7" -pyupgrade = "^2.34.0" pytest = "^7.2.0" pytest-cov = "^4.0.0" +ruff = "^0.0.227" [tool.poetry.extras] sklearn = ["scikit-learn", "joblib"] @@ -108,6 +105,7 @@ torch-cuda11 = "python -m pip install torch --extra-index-url https://download.p [tool.black] # https://github.com/psf/black target-version = ["py38"] +preview = true line-length = 119 color = true @@ -127,49 +125,11 @@ exclude = ''' )/ ''' -[tool.isort] -# https://github.com/timothycrosley/isort/ -py_version = 38 -line_length = 119 - -known_typing = ["typing", "types", "typing_extensions", "mypy", "mypy_extensions"] -sections = ["FUTURE", "TYPING", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"] -include_trailing_comma = true -profile = "black" -multi_line_output = 3 -indent = 4 -color_output = true -lines_after_imports = 2 -use_parentheses = true -known_first_party = "inseq" -force_grid_wrap = 0 -ensure_newline_before_comments = true [tool.mypy] # https://mypy.readthedocs.io/en/latest/config_file.html#using-a-pyproject-toml-file python_version = 3.8 -pretty = true -show_traceback = true -color_output = true - -allow_redefinition = false -check_untyped_defs = true -disallow_any_generics = true -disallow_incomplete_defs = true -ignore_missing_imports = true -implicit_reexport = false -no_implicit_optional = true -show_column_numbers = true -show_error_codes = true -show_error_context = true -strict_equality = true -strict_optional = true -warn_no_return = true -warn_redundant_casts = true -warn_return_any = true -warn_unreachable = true -warn_unused_configs = true -warn_unused_ignores = true +strict = true [tool.pytest.ini_options] @@ -194,3 +154,82 @@ addopts = [ "--disable-pytest-warnings", "--color=yes" ] + +[tool.coverage.run] +parallel = true +source = [ + "inseq", +] +omit = [ + "*/__main__.py", +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no-cover", + "pass", + "raise", + "except", + "raise NotImplementedError", +] + +[tool.bandit] +targets = ["inseq"] +skips = ["B301"] + +[tool.ruff] +target-version = "py38" +exclude = [ + ".git", + ".vscode", + ".github", + "__pycache__", + "docs/source/conf.py", + "old", + "build", + "htmlcov", + "dev_examples", + "dist", + ".tox", + "temp", + "*.egg", + "venv", + ".venv", +] +fix = true +ignore = [ + "C901", + "E731", + "E741", + "F821", + "C901", + "B006", + "PLR2004" +] +line-length = 119 +select = [ + "E", # pycodestyle errors + "F", # pyflakes + "W", # pycodestyle warnings + "C", # flake8-comprehensions + "B", # flake8-bugbear + "Q", # flake8-quotes + "I", # isort + "UP", # flake8-pyupgrade 
+ "PLC", # flake8-pylint + "PLE", # flake8-pylint + "PLR", # flake8-pylint + "PLW", # flake8-pylint +] +src = ["inseq", "tests"] + + +[tool.ruff.per-file-ignores] +"__init__.py" = ["F401"] + +[tool.ruff.isort] +known-first-party = ["inseq"] +order-by-type = true + +[tool.ruff.pyupgrade] +keep-runtime-typing = true diff --git a/requirements-dev.txt b/requirements-dev.txt index 15159370..9c70cbe6 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,7 +1,6 @@ aiohttp==3.7.4.post0 ; python_full_version >= "3.8.1" and python_version < "3.12" alabaster==0.7.13 ; python_full_version >= "3.8.1" and python_version < "3.12" appnope==0.1.3 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Darwin" or python_full_version >= "3.8.1" and python_version < "3.12" and sys_platform == "darwin" -astroid==2.13.2 ; python_full_version >= "3.8.1" and python_version < "3.12" asttokens==2.2.1 ; python_full_version >= "3.8.1" and python_version < "3.12" async-timeout==3.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" attrs==22.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -26,7 +25,7 @@ darglint==1.8.1 ; python_full_version >= "3.8.1" and python_version < "3.12" datasets[datasets]==2.8.0 ; python_full_version >= "3.8.1" and python_version < "3.12" debugpy==1.6.5 ; python_full_version >= "3.8.1" and python_version < "3.12" decorator==5.1.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -dill==0.3.6 ; python_version < "3.12" and python_full_version >= "3.8.1" +dill==0.3.6 ; python_full_version >= "3.8.1" and python_version < "3.12" distlib==0.3.6 ; python_full_version >= "3.8.1" and python_version < "3.12" docutils==0.17.1 ; python_full_version >= "3.8.1" and python_version < "3.12" dparse==0.6.2 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -34,9 +33,8 @@ entrypoints==0.4 ; python_full_version >= "3.8.1" and python_version < "3.12" exceptiongroup==1.1.0 ; python_full_version >= "3.8.1" and python_version < "3.11" executing==1.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" filelock==3.9.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -flake8==6.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" fonttools==4.38.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -fsspec[http]==2022.11.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +fsspec[http]==2023.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" gitdb==4.0.10 ; python_full_version >= "3.8.1" and python_version < "3.12" gitpython==3.1.30 ; python_full_version >= "3.8.1" and python_version < "3.12" huggingface-hub==0.11.1 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -49,8 +47,6 @@ ipykernel==6.20.2 ; python_full_version >= "3.8.1" and python_version < "3.12" ipykernel[notebook]==6.20.2 ; python_full_version >= "3.8.1" and python_version < "3.12" ipython==8.8.0 ; python_full_version >= "3.8.1" and python_version < "3.12" ipywidgets[notebook]==8.0.4 ; python_full_version >= "3.8.1" and python_version < "3.12" -isort==5.11.4 ; python_full_version >= "3.8.1" and python_version < "3.12" -isort[colors]==5.11.4 ; python_full_version >= "3.8.1" and python_version < "3.12" jedi==0.18.2 ; python_full_version >= "3.8.1" and python_version < "3.12" jinja2==3.1.2 ; python_full_version >= "3.8.1" and python_version < "3.12" joblib==1.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -59,12 +55,10 @@ jupyter-client==7.4.9 ; 
python_full_version >= "3.8.1" and python_version < "3.1 jupyter-core==5.1.3 ; python_full_version >= "3.8.1" and python_version < "3.12" jupyterlab-widgets==3.0.5 ; python_full_version >= "3.8.1" and python_version < "3.12" kiwisolver==1.4.4 ; python_full_version >= "3.8.1" and python_version < "3.12" -lazy-object-proxy==1.9.0 ; python_full_version >= "3.8.1" and python_version < "3.12" markdown==3.4.1 ; python_full_version >= "3.8.1" and python_version < "3.12" markupsafe==2.1.2 ; python_full_version >= "3.8.1" and python_version < "3.12" matplotlib-inline==0.1.6 ; python_full_version >= "3.8.1" and python_version < "3.12" matplotlib==3.6.3 ; python_full_version >= "3.8.1" and python_version < "3.12" -mccabe==0.7.0 ; python_full_version >= "3.8.1" and python_version < "3.12" multidict==6.0.4 ; python_full_version >= "3.8.1" and python_version < "3.12" multiprocess==0.70.14 ; python_full_version >= "3.8.1" and python_version < "3.12" mypy-extensions==0.4.3 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -94,17 +88,13 @@ psutil==5.9.4 ; python_full_version >= "3.8.1" and python_version < "3.12" ptyprocess==0.7.0 ; python_full_version >= "3.8.1" and python_version < "3.12" and sys_platform != "win32" pure-eval==0.2.2 ; python_full_version >= "3.8.1" and python_version < "3.12" pyarrow==10.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -pycodestyle==2.10.0 ; python_full_version >= "3.8.1" and python_version < "3.12" pycparser==2.21 ; python_full_version >= "3.8.1" and python_version < "3.12" and implementation_name == "pypy" -pyflakes==3.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" pygments==2.14.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -pylint==2.15.10 ; python_full_version >= "3.8.1" and python_version < "3.12" pyparsing==3.0.9 ; python_full_version >= "3.8.1" and python_version < "3.12" pytest-cov==4.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" pytest==7.2.1 ; python_full_version >= "3.8.1" and python_version < "3.12" python-dateutil==2.8.2 ; python_full_version >= "3.8.1" and python_version < "3.12" pytz==2022.7.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -pyupgrade==2.38.4 ; python_full_version >= "3.8.1" and python_version < "3.12" pywin32==305 ; sys_platform == "win32" and platform_python_implementation != "PyPy" and python_full_version >= "3.8.1" and python_version < "3.12" pyyaml==6.0 ; python_full_version >= "3.8.1" and python_version < "3.12" pyzmq==25.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -115,6 +105,7 @@ responses==0.18.0 ; python_full_version >= "3.8.1" and python_version < "3.12" rich==10.16.2 ; python_full_version >= "3.8.1" and python_version < "3.12" ruamel-yaml-clib==0.2.7 ; platform_python_implementation == "CPython" and python_version < "3.11" and python_full_version >= "3.8.1" ruamel-yaml==0.17.21 ; python_full_version >= "3.8.1" and python_version < "3.12" +ruff==0.0.227 ; python_full_version >= "3.8.1" and python_version < "3.12" safety==2.3.4 ; python_full_version >= "3.8.1" and python_version < "3.12" scikit-learn[sklearn]==1.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" scipy==1.10.0 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -139,11 +130,9 @@ sphinxext-opengraph==0.4.2 ; python_full_version >= "3.8.1" and python_version < stack-data==0.6.2 ; python_full_version >= "3.8.1" and python_version < "3.12" stevedore==4.1.1 ; python_full_version >= "3.8.1" and python_version < "3.12" 
threadpoolctl==3.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -tokenize-rt==4.2.1 ; python_full_version >= "3.8.1" and python_version < "3.12" tokenizers==0.13.2 ; python_full_version >= "3.8.1" and python_version < "3.12" toml==0.10.2 ; python_full_version >= "3.8.1" and python_version < "3.12" tomli==2.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -tomlkit==0.11.6 ; python_full_version >= "3.8.1" and python_version < "3.12" torch==1.13.1 ; python_full_version >= "3.8.1" and python_version < "3.12" torchtyping==0.1.4 ; python_full_version >= "3.8.1" and python_version < "3.12" tornado==6.2 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -157,7 +146,6 @@ virtualenv==20.17.1 ; python_full_version >= "3.8.1" and python_version < "3.12" wcwidth==0.2.6 ; python_full_version >= "3.8.1" and python_version < "3.12" wheel==0.38.4 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" widgetsnbextension==4.0.5 ; python_full_version >= "3.8.1" and python_version < "3.12" -wrapt==1.14.1 ; python_full_version >= "3.8.1" and python_version < "3.12" xxhash==3.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" yarl==1.8.2 ; python_full_version >= "3.8.1" and python_version < "3.12" zipp==3.11.0 ; python_full_version >= "3.8.1" and python_version < "3.10" diff --git a/setup.cfg b/setup.cfg index 1f8b1526..3c46a08c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,50 +2,3 @@ # https://github.com/terrencepreilly/darglint strictness = long docstring_style = google - -[coverage:report] -exclude_lines = - pragma: no-cover - pass - raise - except - raise NotImplementedError - -[coverage:run] -source=inseq -omit = - */__main__.py - -[flake8] -exclude = - .git, - .vscode, - .github, - __pycache__, - docs/source/conf.py, - old, - build, - htmlcov, - examples, - dist, - .tox, - temp, - *.egg, - venv, - .venv - dev_examples -select = E,W,F -doctests = False -verbose = 2 -format = pylint -max-line-length = 119 -per-file-ignores = __init__.py:F401 -ignore= E203, E731, E741, F821, W503, C901 - -[metadata] -license_file = LICENSE -description-file = README.md - -[bandit] -targets = ["inseq"] -skips = ["B301"] diff --git a/tests/__init__.py b/tests/__init__.py index 5e69ae86..2923b7b7 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,4 +1,3 @@ import os - TEST_DIR = os.path.dirname(os.path.abspath(__file__)) diff --git a/tests/attr/feat/ops/test_attention_utils.py b/tests/attr/feat/ops/test_attention_utils.py index ca3af3b6..b0c3d55d 100644 --- a/tests/attr/feat/ops/test_attention_utils.py +++ b/tests/attr/feat/ops/test_attention_utils.py @@ -5,7 +5,6 @@ from inseq.attr.feat.ops.basic_attention import BaseAttentionAttribution - AGGREGATE_FN_OPTIONS = list(BaseAttentionAttribution.AGGREGATE_FN_OPTIONS.keys()) + [None] AGGREGATE_OPTIONS = ["int", "range", "list", "none"] @@ -14,15 +13,14 @@ @mark.parametrize("aggr_method", AGGREGATE_FN_OPTIONS) @mark.parametrize("aggr_layers", AGGREGATE_OPTIONS) def test_layer_aggregation(aggr_method: str, aggr_layers: str) -> None: - - layerAttention = tuple() + layerAttention = () shape = (5, 8, 7, 7) layers = 0 max_layer = random.randint(4, 10) - for i in range(max_layer): + for _ in range(max_layer): attention = torch.rand(size=shape, dtype=torch.float) layerAttention = layerAttention + (attention,) @@ -49,7 +47,6 @@ def test_layer_aggregation(aggr_method: str, aggr_layers: str) -> None: @mark.parametrize("aggr_method", AGGREGATE_FN_OPTIONS) @mark.parametrize("aggr_heads", 
AGGREGATE_OPTIONS) def test_head_aggregation(aggr_method: str, aggr_heads: str) -> None: - num_heads = random.randint(4, 12) in_shape = (5, num_heads, 7, 7) diff --git a/tests/attr/feat/ops/test_monotonic_path_builder.py b/tests/attr/feat/ops/test_monotonic_path_builder.py index 4550fc4d..802317c8 100644 --- a/tests/attr/feat/ops/test_monotonic_path_builder.py +++ b/tests/attr/feat/ops/test_monotonic_path_builder.py @@ -1,10 +1,8 @@ -from typing import List, Tuple - from itertools import islice +from typing import List, Tuple from inseq.utils import is_joblib_available - if is_joblib_available(): from joblib import Parallel, delayed @@ -60,7 +58,7 @@ def walrus_operator_find_word_path(wrd_idx: int, n_steps: int): @pytest.mark.parametrize( - ("input_dims"), + "input_dims", [ ((512,)), ((512, 12)), @@ -106,7 +104,7 @@ def test_walrus_find_word_path(wrd_idx: int, n_steps: int) -> None: @pytest.mark.slow @pytest.mark.parametrize( - ("word_idx"), + "word_idx", [(0), (1), (735), (111), (10296)], ) def test_scaled_monotonic_path_embeddings(word_idx: int, dig_model) -> None: @@ -122,7 +120,7 @@ def test_scaled_monotonic_path_embeddings(word_idx: int, dig_model) -> None: reason="joblib is not available", ) @pytest.mark.parametrize( - ("ids"), + "ids", [ ( [ diff --git a/tests/data/test_aggregator.py b/tests/data/test_aggregator.py index 5eb24fb5..e7c35576 100644 --- a/tests/data/test_aggregator.py +++ b/tests/data/test_aggregator.py @@ -13,7 +13,6 @@ SubwordAggregator, ) - EXAMPLES_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../fixtures/aggregator.json") EXAMPLES = json.load(open(EXAMPLES_FILE)) diff --git a/tests/models/test_huggingface_model.py b/tests/models/test_huggingface_model.py index d53bc399..8ec8c70a 100644 --- a/tests/models/test_huggingface_model.py +++ b/tests/models/test_huggingface_model.py @@ -15,7 +15,6 @@ from inseq.data import FeatureAttributionOutput, FeatureAttributionSequenceOutput from inseq.utils import get_default_device - EXAMPLES_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../fixtures/huggingface_model.json") EXAMPLES = json.load(open(EXAMPLES_FILE))
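Note on the import reshuffling visible in the test hunks above: the deleted [tool.isort] table forced a custom TYPING section ahead of the standard library and two blank lines after the import block (lines_after_imports = 2), whereas the new [tool.ruff] configuration enables the "I" (isort) rules with their defaults plus known-first-party = ["inseq"]. Under those defaults, typing is treated as ordinary stdlib, groups run stdlib / third-party / first-party, and a single blank line follows the import block, as the hunks show; with fix = true set in [tool.ruff], running ruff applies these rewrites in place. A minimal sketch of the resulting layout is below — the pytest import and the trailing constant are illustrative placeholders, not taken from any file in this patch; the other imports mirror the updated header of tests/attr/feat/ops/test_monotonic_path_builder.py.

    # Standard library, alphabetized; "typing" no longer gets its own isort section.
    from itertools import islice
    from typing import List, Tuple

    # Third-party packages come next (illustrative).
    import pytest

    # First-party modules, recognized via known-first-party = ["inseq"].
    from inseq.utils import is_joblib_available

    # A single blank line (previously two) separates imports from module code.
    N_STEPS = 10  # illustrative constant

Because [tool.ruff.pyupgrade] sets keep-runtime-typing = true, the UP rules keep typing.List / typing.Tuple annotations instead of rewriting them to the PEP 585/604 builtin generics, presumably so that runtime annotation consumers such as torchtyping (pinned above) keep working.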