Run pyupgrade for 3.8 #3236

Merged
merged 3 commits on May 23, 2024
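For context, pyupgrade rewrites code to the idioms available on the project's minimum supported Python; with the floor raised to 3.8, the per-file changes below are mechanical applications of a handful of patterns. A minimal, self-contained sketch of those patterns (illustrative names, not lines taken from this diff):

# Illustrative before/after pairs for the kind of rewrites pyupgrade applies
# once Python 3.8 is the minimum supported version; each pair is equivalent.

# 1. The explicit `object` base class is redundant in Python 3.
class OldStyle(object):
    pass

class NewStyle:
    pass

# 2. str.format auto-numbers empty replacement fields.
assert "value: {0}".format(42) == "value: {}".format(42)

# 3. Set literals replace set([...]).
assert set(["a", "b"]) == {"a", "b"}

# 4. "r" is already the default mode for open(), so it can be dropped:
#    open(path, "r")  ->  open(path)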
7 changes: 1 addition & 6 deletions redis/__init__.py
@@ -1,4 +1,4 @@
-import sys
+from importlib import metadata

 from redis import asyncio # noqa
 from redis.backoff import default_backoff
@@ -36,11 +36,6 @@
 )
 from redis.utils import from_url

-if sys.version_info >= (3, 8):
-    from importlib import metadata
-else:
-    import importlib_metadata as metadata
-

 def int_or_str(value):
     try:
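importlib.metadata has been part of the standard library since Python 3.8, so the importlib_metadata backport and the version check guarding it can go. A rough sketch of the usual pattern for resolving an installed package's version with it (the fallback string is illustrative, not necessarily what redis-py uses):

from importlib import metadata  # standard library since Python 3.8

try:
    # ask the installed distribution for its version instead of hard-coding it
    __version__ = metadata.version("redis")
except metadata.PackageNotFoundError:
    # e.g. running from a source checkout that was never pip-installed
    __version__ = "unknown"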
2 changes: 1 addition & 1 deletion redis/client.py
@@ -831,7 +831,7 @@ def clean_health_check_responses(self) -> None:
                 else:
                     raise PubSubError(
                         "A non health check response was cleaned by "
-                        "execute_command: {0}".format(response)
+                        "execute_command: {}".format(response)
                     )
             ttl -= 1

3 changes: 1 addition & 2 deletions redis/cluster.py
@@ -1851,8 +1851,7 @@ def _sharded_message_generator(self):

     def _pubsubs_generator(self):
         while True:
-            for pubsub in self.node_pubsub_mapping.values():
-                yield pubsub
+            yield from self.node_pubsub_mapping.values()

     def get_sharded_message(
         self, ignore_subscribe_messages=False, timeout=0.0, target_node=None
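yield from delegates to the sub-iterable and yields each of its items in turn, so it is a drop-in replacement for the removed two-line loop; inside the surrounding while True: it still produces the same endless round-robin over the mapping's pubsub objects. A standalone sketch of the equivalence (plain strings stand in for the pubsub objects):

def loop_version(values):
    # shape of the code before the change
    for value in values:
        yield value

def delegating_version(values):
    # shape after the change: delegation with `yield from`
    yield from values

# both generators produce exactly the same sequence
pubsubs = ["pubsub-a", "pubsub-b"]
assert list(loop_version(pubsubs)) == list(delegating_version(pubsubs))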
2 changes: 1 addition & 1 deletion redis/commands/bf/__init__.py
@@ -5,7 +5,7 @@
 from .info import BFInfo, CFInfo, CMSInfo, TDigestInfo, TopKInfo


-class AbstractBloom(object):
+class AbstractBloom:
     """
     The client allows to interact with RedisBloom and use all of
     it's functionality.
10 changes: 5 additions & 5 deletions redis/commands/bf/info.py
@@ -1,7 +1,7 @@
 from ..helpers import nativestr


-class BFInfo(object):
+class BFInfo:
     capacity = None
     size = None
     filterNum = None
@@ -26,7 +26,7 @@ def __getitem__(self, item):
         return getattr(self, item)


-class CFInfo(object):
+class CFInfo:
     size = None
     bucketNum = None
     filterNum = None
@@ -57,7 +57,7 @@ def __getitem__(self, item):
         return getattr(self, item)


-class CMSInfo(object):
+class CMSInfo:
     width = None
     depth = None
     count = None
@@ -72,7 +72,7 @@ def __getitem__(self, item):
         return getattr(self, item)


-class TopKInfo(object):
+class TopKInfo:
     k = None
     width = None
     depth = None
@@ -89,7 +89,7 @@ def __getitem__(self, item):
         return getattr(self, item)


-class TDigestInfo(object):
+class TDigestInfo:
     compression = None
     capacity = None
     merged_nodes = None
2 changes: 1 addition & 1 deletion redis/commands/graph/__init__.py
@@ -252,7 +252,7 @@ async def call_procedure(self, procedure, *args, read_only=False, **kwagrs):
         return await self.query(q, read_only=read_only)

     async def labels(self):
-        return ((await self.call_procedure(DB_LABELS, read_only=True))).result_set
+        return (await self.call_procedure(DB_LABELS, read_only=True)).result_set

     async def property_keys(self):
         return (await self.call_procedure(DB_PROPERTYKEYS, read_only=True)).result_set
2 changes: 1 addition & 1 deletion redis/commands/json/commands.py
@@ -314,7 +314,7 @@ def set_file(

"""

-        with open(file_name, "r") as fp:
+        with open(file_name) as fp:
             file_content = loads(fp.read())

         return self.set(name, path, file_content, nx=nx, xx=xx, decode_keys=decode_keys)
6 changes: 1 addition & 5 deletions redis/utils.py
@@ -1,5 +1,4 @@
 import logging
-import sys
 from contextlib import contextmanager
 from functools import wraps
 from typing import Any, Dict, Mapping, Union
@@ -28,10 +27,7 @@
 except ImportError:
     CRYPTOGRAPHY_AVAILABLE = False

-if sys.version_info >= (3, 8):
-    from importlib import metadata
-else:
-    import importlib_metadata as metadata
+from importlib import metadata


 def from_url(url, **kwargs):
2 changes: 1 addition & 1 deletion tests/ssl_utils.py
@@ -9,6 +9,6 @@ def get_ssl_filename(name):
         os.path.join(root, "..", "dockers", "stunnel", "keys")
     )
     if not os.path.isdir(cert_dir):
-        raise IOError(f"No SSL certificates found. They should be in {cert_dir}")
+        raise OSError(f"No SSL certificates found. They should be in {cert_dir}")

     return os.path.join(cert_dir, name)
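IOError has been an alias of OSError since Python 3.3, so raising OSError directly changes nothing at runtime and any existing except IOError: handlers keep working. A quick sketch:

# IOError is literally the same class as OSError on Python 3
assert IOError is OSError

try:
    raise OSError("No SSL certificates found.")
except IOError:  # still caught, because the two names are one class
    pass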
2 changes: 1 addition & 1 deletion tests/test_asyncio/compat.py
@@ -4,7 +4,7 @@
 try:
     mock.AsyncMock
 except AttributeError:
-    import mock
+    from unittest import mock

 try:
     from contextlib import aclosing
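unittest.mock gained AsyncMock in Python 3.8, so once that attribute probe fails there is no longer a reason to reach for the third-party mock backport; the standard-library module is enough. A minimal sketch of AsyncMock standing in for an async callable (values are illustrative):

import asyncio
from unittest import mock  # AsyncMock ships with the stdlib on Python 3.8+

# AsyncMock calls return awaitables, so it can replace async client methods
async_client = mock.AsyncMock(return_value=42)

assert asyncio.run(async_client()) == 42
assert async_client.await_count == 1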
8 changes: 4 additions & 4 deletions tests/test_asyncio/test_search.py
@@ -1487,14 +1487,14 @@ async def test_withsuffixtrie(decoded_r: redis.Redis):
     assert await decoded_r.ft().dropindex("idx")

     # create withsuffixtrie index (text field)
-    assert await decoded_r.ft().create_index((TextField("t", withsuffixtrie=True)))
+    assert await decoded_r.ft().create_index(TextField("t", withsuffixtrie=True))
     await waitForIndex(decoded_r, getattr(decoded_r.ft(), "index_name", "idx"))
     info = await decoded_r.ft().info()
     assert "WITHSUFFIXTRIE" in info["attributes"][0]
     assert await decoded_r.ft().dropindex("idx")

     # create withsuffixtrie index (tag field)
-    assert await decoded_r.ft().create_index((TagField("t", withsuffixtrie=True)))
+    assert await decoded_r.ft().create_index(TagField("t", withsuffixtrie=True))
     await waitForIndex(decoded_r, getattr(decoded_r.ft(), "index_name", "idx"))
     info = await decoded_r.ft().info()
     assert "WITHSUFFIXTRIE" in info["attributes"][0]
@@ -1504,14 +1504,14 @@ async def test_withsuffixtrie(decoded_r: redis.Redis):
     assert await decoded_r.ft().dropindex("idx")

     # create withsuffixtrie index (text fields)
-    assert await decoded_r.ft().create_index((TextField("t", withsuffixtrie=True)))
+    assert await decoded_r.ft().create_index(TextField("t", withsuffixtrie=True))
     waitForIndex(decoded_r, getattr(decoded_r.ft(), "index_name", "idx"))
     info = await decoded_r.ft().info()
     assert "WITHSUFFIXTRIE" in info["attributes"][0]["flags"]
     assert await decoded_r.ft().dropindex("idx")

     # create withsuffixtrie index (tag field)
-    assert await decoded_r.ft().create_index((TagField("t", withsuffixtrie=True)))
+    assert await decoded_r.ft().create_index(TagField("t", withsuffixtrie=True))
     waitForIndex(decoded_r, getattr(decoded_r.ft(), "index_name", "idx"))
     info = await decoded_r.ft().info()
     assert "WITHSUFFIXTRIE" in info["attributes"][0]["flags"]
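The doubled parentheses stripped in these create_index calls were inert: parentheses around a single expression do not build a tuple (only a trailing comma does), so create_index((TextField(...))) and create_index(TextField(...)) receive exactly the same argument. A quick sketch with a placeholder value:

field = "TextField-placeholder"  # stands in for TextField("t", withsuffixtrie=True)

# wrapping a single expression in parentheses leaves it unchanged...
assert (field) is field
# ...only a trailing comma would turn it into a one-element tuple
assert (field,) == ("TextField-placeholder",)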
28 changes: 13 additions & 15 deletions tests/test_search.py
@@ -1383,16 +1383,14 @@ def test_aggregations_apply(client):
     )
     res = client.ft().aggregate(req)
     if is_resp2_connection(client):
-        res_set = set([res.rows[0][1], res.rows[1][1]])
-        assert res_set == set(["6373878785249699840", "6373878758592700416"])
+        res_set = {res.rows[0][1], res.rows[1][1]}
+        assert res_set == {"6373878785249699840", "6373878758592700416"}
     else:
-        res_set = set(
-            [
-                res["results"][0]["extra_attributes"]["CreatedDateTimeUTC"],
-                res["results"][1]["extra_attributes"]["CreatedDateTimeUTC"],
-            ],
-        )
-        assert res_set == set(["6373878785249699840", "6373878758592700416"])
+        res_set = {
+            res["results"][0]["extra_attributes"]["CreatedDateTimeUTC"],
+            res["results"][1]["extra_attributes"]["CreatedDateTimeUTC"],
+        }
+        assert res_set == {"6373878785249699840", "6373878758592700416"}


 @pytest.mark.redismod
@@ -2099,7 +2097,7 @@ def test_numeric_params(client):
 @pytest.mark.redismod
 @skip_ifmodversion_lt("2.4.3", "search")
 def test_geo_params(client):
-    client.ft().create_index((GeoField("g")))
+    client.ft().create_index(GeoField("g"))
     client.hset("doc1", mapping={"g": "29.69465, 34.95126"})
     client.hset("doc2", mapping={"g": "29.69350, 34.94737"})
     client.hset("doc3", mapping={"g": "29.68746, 34.94882"})
@@ -2228,14 +2226,14 @@ def test_withsuffixtrie(client: redis.Redis):
     assert client.ft().dropindex("idx")

     # create withsuffixtrie index (text fields)
-    assert client.ft().create_index((TextField("t", withsuffixtrie=True)))
+    assert client.ft().create_index(TextField("t", withsuffixtrie=True))
     waitForIndex(client, getattr(client.ft(), "index_name", "idx"))
     info = client.ft().info()
     assert "WITHSUFFIXTRIE" in info["attributes"][0]
     assert client.ft().dropindex("idx")

     # create withsuffixtrie index (tag field)
-    assert client.ft().create_index((TagField("t", withsuffixtrie=True)))
+    assert client.ft().create_index(TagField("t", withsuffixtrie=True))
     waitForIndex(client, getattr(client.ft(), "index_name", "idx"))
     info = client.ft().info()
     assert "WITHSUFFIXTRIE" in info["attributes"][0]
@@ -2245,14 +2243,14 @@ def test_withsuffixtrie(client: redis.Redis):
     assert client.ft().dropindex("idx")

     # create withsuffixtrie index (text fields)
-    assert client.ft().create_index((TextField("t", withsuffixtrie=True)))
+    assert client.ft().create_index(TextField("t", withsuffixtrie=True))
     waitForIndex(client, getattr(client.ft(), "index_name", "idx"))
     info = client.ft().info()
     assert "WITHSUFFIXTRIE" in info["attributes"][0]["flags"]
     assert client.ft().dropindex("idx")

     # create withsuffixtrie index (tag field)
-    assert client.ft().create_index((TagField("t", withsuffixtrie=True)))
+    assert client.ft().create_index(TagField("t", withsuffixtrie=True))
     waitForIndex(client, getattr(client.ft(), "index_name", "idx"))
     info = client.ft().info()
     assert "WITHSUFFIXTRIE" in info["attributes"][0]["flags"]
@@ -2271,7 +2269,7 @@ def test_query_timeout(r: redis.Redis):

 @pytest.mark.redismod
 def test_geoshape(client: redis.Redis):
-    client.ft().create_index((GeoShapeField("geom", GeoShapeField.FLAT)))
+    client.ft().create_index(GeoShapeField("geom", GeoShapeField.FLAT))
     waitForIndex(client, getattr(client.ft(), "index_name", "idx"))
     client.hset("small", "geom", "POLYGON((1 1, 1 100, 100 100, 100 1, 1 1))")
     client.hset("large", "geom", "POLYGON((1 1, 1 200, 200 200, 200 1, 1 1))")