ci: fix deprecated type hints reported by ruff check (#535)
* ci: fix deprecated type hints reported by `ruff check`

* ci: update the Python version matrix to remove end-of-life versions and add the currently supported one

* ci: remove Python 3.13 from version matrix
lvaylet authored Dec 11, 2024
1 parent 868c5c6 commit b1200b3
Showing 6 changed files with 24 additions and 25 deletions.
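
The deprecated hints in question are the `typing.List`, `typing.Dict` and `typing.Tuple` aliases that PEP 585 deprecated in Python 3.9 in favor of subscripting the built-in types directly; `ruff check` reports them through its pyupgrade (`UP`) rules, typically UP006 for the annotations and UP035 for the deprecated imports. A minimal before/after sketch of the pattern (illustrative only, not code from this repository):

# Before: flagged by ruff's pyupgrade rules
from typing import Dict, List, Tuple

def summarize_old(events: List[int]) -> Tuple[int, Dict[str, int]]:
    return sum(events), {"count": len(events)}

# After: built-in generics, no typing import needed on Python >= 3.9
def summarize_new(events: list[int]) -> tuple[int, dict[str, int]]:
    return sum(events), {"count": len(events)}
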
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -10,7 +10,7 @@ jobs:
matrix:
os: [ubuntu-latest]
architecture: ['x64']
-python-version: ['3.8', '3.9', '3.10', '3.11']
+python-version: ['3.9', '3.10', '3.11', '3.12']
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
@@ -31,7 +31,7 @@ jobs:
matrix:
os: [ubuntu-latest]
architecture: ['x64']
-python-version: ['3.8', '3.9', '3.10', '3.11']
+python-version: ['3.9', '3.10', '3.11', '3.12']
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
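Dropping Python 3.8, which reached end of life in October 2024, is also what makes the type-hint change safe at runtime: built-in generics such as `list[str]` are only subscriptable from Python 3.9 onward, so on 3.8 the new annotations would fail at import time unless every module added `from __future__ import annotations`. A hypothetical snippet showing the behavior (not part of the repository):

# Defines fine on Python >= 3.9; on 3.8 the def itself raises
# "TypeError: 'type' object is not subscriptable".
def first_window(windows: list[int]) -> int:
    return windows[0]

print(first_window([3600, 86400]))  # 3600
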
22 changes: 11 additions & 11 deletions slo_generator/backends/cloud_monitoring_mql.py
@@ -22,7 +22,7 @@
import warnings
from collections import OrderedDict
from datetime import datetime, timezone
-from typing import List, Optional, Tuple
+from typing import Optional

from google.api.distribution_pb2 import Distribution
from google.cloud.monitoring_v3 import QueryTimeSeriesRequest
@@ -60,7 +60,7 @@ def good_bad_ratio(
timestamp: int,
window: int,
slo_config: dict,
-) -> Tuple[int, int]:
+) -> tuple[int, int]:
"""Query two timeseries, one containing 'good' events, one containing
'bad' events.
@@ -78,16 +78,16 @@ def good_bad_ratio(
filter_valid: Optional[str] = measurement.get("filter_valid")

# Query 'good events' timeseries
-good_ts: List[TimeSeries] = self.query(timestamp, window, filter_good)
+good_ts: list[TimeSeries] = self.query(timestamp, window, filter_good)
good_event_count: int = CM.count(good_ts)

# Query 'bad events' timeseries
bad_event_count: int
if filter_bad:
-bad_ts: List[TimeSeries] = self.query(timestamp, window, filter_bad)
+bad_ts: list[TimeSeries] = self.query(timestamp, window, filter_bad)
bad_event_count = CM.count(bad_ts)
elif filter_valid:
-valid_ts: List[TimeSeries] = self.query(timestamp, window, filter_valid)
+valid_ts: list[TimeSeries] = self.query(timestamp, window, filter_valid)
bad_event_count = CM.count(valid_ts) - good_event_count
else:
raise ValueError("One of `filter_bad` or `filter_valid` is required.")
@@ -103,7 +103,7 @@ def distribution_cut(
timestamp: int,
window: int,
slo_config: dict,
-) -> Tuple[int, int]:
+) -> tuple[int, int]:
"""Query one timeseries of type 'exponential'.
Args:
@@ -162,7 +162,7 @@ def distribution_cut(

return good_event_count, bad_event_count

-def exponential_distribution_cut(self, *args, **kwargs) -> Tuple[int, int]:
+def exponential_distribution_cut(self, *args, **kwargs) -> tuple[int, int]:
"""Alias for `distribution_cut` method to allow for backwards
compatibility.
"""
@@ -192,12 +192,12 @@ def query_sli(
"""
measurement: dict = slo_config["spec"]["service_level_indicator"]
query: str = measurement["query"]
-series: List[TimeSeries] = self.query(timestamp, window, query)
+series: list[TimeSeries] = self.query(timestamp, window, query)
sli_value: float = series[0].point_data[0].values[0].double_value
LOGGER.debug(f"SLI value: {sli_value}")
return sli_value

-def query(self, timestamp: float, window: int, query: str) -> List[TimeSeries]:
+def query(self, timestamp: float, window: int, query: str) -> list[TimeSeries]:
"""Query timeseries from Cloud Monitoring using MQL.
Args:
@@ -219,7 +219,7 @@ def query(self, timestamp: float, window: int, query: str) -> List[TimeSeries]:
self.client.query_time_series(request) # type: ignore[union-attr]
)
# fmt: on
-timeseries: List[TimeSeries] = list(timeseries_pager)
+timeseries: list[TimeSeries] = list(timeseries_pager)
LOGGER.debug(pprint.pformat(timeseries))
return timeseries

@@ -251,7 +251,7 @@ def enrich_query_with_time_horizon_and_period(
return query_with_time_horizon_and_period

@staticmethod
-def count(timeseries: List[TimeSeries]) -> int:
+def count(timeseries: list[TimeSeries]) -> int:
"""Count events in time series assuming it was aligned with ALIGN_SUM
and reduced with REDUCE_SUM (default).
3 changes: 2 additions & 1 deletion slo_generator/backends/cloud_service_monitoring.py
@@ -21,7 +21,8 @@
import logging
import os
import warnings
-from typing import Optional, Sequence, Union
+from collections.abc import Sequence
+from typing import Optional, Union

import google.api_core.exceptions
from google.cloud.monitoring_v3 import ServiceMonitoringServiceClient
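PEP 585 also covers the abstract base classes, which is why `Sequence` moves from `typing` to `collections.abc` here while `Optional` and `Union` stay put: their `X | Y` replacement (PEP 604) only evaluates at runtime on Python 3.10+, and the matrix above still includes 3.9. A small sketch of the resulting import style (names are illustrative, not taken from this module):

from collections.abc import Sequence
from typing import Optional

def first_service(services: Sequence[str]) -> Optional[str]:
    return services[0] if services else None
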
8 changes: 4 additions & 4 deletions slo_generator/backends/prometheus.py
@@ -20,7 +20,7 @@
import logging
import os
import pprint
-from typing import Dict, List, Optional, Tuple, Union
+from typing import Optional, Union

from prometheus_http_client import Prometheus

@@ -99,7 +99,7 @@ def good_bad_ratio(self, timestamp, window, slo_config):

def distribution_cut(
self, timestamp: int, window: int, slo_config: dict
-) -> Tuple[float, float]:
+) -> tuple[float, float]:
"""Query events for distributions (histograms).
Args:
@@ -189,8 +189,8 @@ def count(response: dict) -> float:
def _fmt_query(
query: str,
window: int,
-operators: Union[List[str], None] = None,
-labels: Union[Dict[str, str], None] = None,
+operators: Union[list[str], None] = None,
+labels: Union[dict[str, str], None] = None,
) -> str:
"""Format Prometheus query:
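Note that only the inner containers change in this hunk; the `Union[..., None]` wrappers are kept. They are interchangeable with `Optional[...]`, and the shorter `list[str] | None` spelling (PEP 604) is avoided because it only evaluates at runtime on Python 3.10+, which the supported version matrix does not yet require. For reference, an illustrative comparison of the spellings (not code from this backend):

from typing import Optional, Union

operators_a: Union[list[str], None] = None   # form kept by this diff
operators_b: Optional[list[str]] = None      # equivalent, also valid on Python 3.9
# operators_c: list[str] | None = None       # PEP 604, needs Python >= 3.10 at runtime
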
9 changes: 4 additions & 5 deletions slo_generator/constants.py
@@ -17,7 +17,6 @@
"""

import os
-from typing import Dict, List, Tuple

# Compute
NO_DATA: int = -1
@@ -30,7 +29,7 @@
DEBUG: int = int(os.environ.get("DEBUG", "0"))

# Exporters supporting v2 SLO report format
-V2_EXPORTERS: Tuple[str, ...] = ("Pubsub", "Cloudevent")
+V2_EXPORTERS: tuple[str, ...] = ("Pubsub", "Cloudevent")

# Config skeletons
CONFIG_SCHEMA: dict = {
@@ -53,15 +52,15 @@

# Providers that have changed with v2 YAML config format. This mapping helps
# migrate them to their updated names.
-PROVIDERS_COMPAT: Dict[str, str] = {
+PROVIDERS_COMPAT: dict[str, str] = {
"Stackdriver": "CloudMonitoring",
"StackdriverServiceMonitoring": "CloudServiceMonitoring",
}

# Fields that have changed name with v2 YAML config format. This mapping helps
# migrate them back to their former name, so that exporters are backward-
# compatible with v1.
-METRIC_LABELS_COMPAT: Dict[str, str] = {
+METRIC_LABELS_COMPAT: dict[str, str] = {
"goal": "slo_target",
"description": "slo_description",
"error_budget_burn_rate_threshold": "alerting_burn_rate_threshold",
@@ -70,7 +69,7 @@
# Fields that used to be specified in top-level of YAML config are now specified
# in metadata fields. This mapping helps migrate them back to the top level when
# exporting reports, so that exporters are backward-compatible with v1.
-METRIC_METADATA_LABELS_TOP_COMPAT: List[str] = [
+METRIC_METADATA_LABELS_TOP_COMPAT: list[str] = [
"service_name",
"feature_name",
"slo_name",
3 changes: 1 addition & 2 deletions slo_generator/report.py
@@ -18,7 +18,6 @@

import logging
from dataclasses import asdict, dataclass, field, fields
-from typing import List

from slo_generator import utils
from slo_generator.constants import COLORED_OUTPUT, MIN_VALID_EVENTS, NO_DATA, Colors
@@ -85,7 +84,7 @@ class SLOReport:
metadata: dict = field(default_factory=dict)

# Data validation
-errors: List[str] = field(default_factory=list)
+errors: list[str] = field(default_factory=list)

def __init__(self, config, backend, step, timestamp, client=None, delete=False): # noqa: PLR0913
# Init dataclass fields from SLO config and Error Budget Policy
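In a dataclass the field annotation is evaluated when the class body runs, so `list[str]` behaves exactly like the old `List[str]` on Python 3.9+, and `field(default_factory=list)` still gives every instance its own list. A reduced sketch of the pattern (hypothetical class, not the actual SLOReport):

from dataclasses import dataclass, field

@dataclass
class Report:
    errors: list[str] = field(default_factory=list)

r = Report()
r.errors.append("not enough valid events")
print(r.errors)  # ['not enough valid events']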
