From db03189b874b6e3e1803be9ed5e840d001de2160 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Oct 2023 09:41:05 +0300 Subject: [PATCH 01/48] Bump actions/checkout from 3 to 4 (#2969) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/docs.yaml | 2 +- .github/workflows/integration.yaml | 12 ++++++------ .github/workflows/pypi-publish.yaml | 2 +- .github/workflows/spellcheck.yml | 2 +- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index e82e7e1530..61da2fce55 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -36,7 +36,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 61ec76e9f8..56f16fa2b0 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -25,7 +25,7 @@ jobs: name: Build docs runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: 3.9 diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index 7e0fea2e41..96b51fbafb 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -29,7 +29,7 @@ jobs: name: Dependency audit runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: pypa/gh-action-pip-audit@v1.0.8 with: inputs: requirements.txt dev_requirements.txt @@ -40,7 +40,7 @@ jobs: name: Code linters runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: 3.9 @@ -64,7 +64,7 @@ jobs: ACTIONS_ALLOW_UNSECURE_COMMANDS: true name: Python ${{ matrix.python-version }} ${{matrix.test-type}}-${{matrix.connection-type}} tests steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} @@ -119,7 +119,7 @@ jobs: ACTIONS_ALLOW_UNSECURE_COMMANDS: true name: RESP3 [${{ matrix.python-version }} ${{matrix.test-type}}-${{matrix.connection-type}}] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} @@ -146,7 +146,7 @@ jobs: matrix: extension: ['tar.gz', 'whl'] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: 3.9 @@ -162,7 +162,7 @@ jobs: matrix: python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', 'pypy-3.7', 'pypy-3.8', 'pypy-3.9'] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/pypi-publish.yaml b/.github/workflows/pypi-publish.yaml index 50332c1995..4f8833372f 100644 --- a/.github/workflows/pypi-publish.yaml +++ b/.github/workflows/pypi-publish.yaml @@ -12,7 +12,7 @@ jobs: build_and_package: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: install python uses: actions/setup-python@v4 with: diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml 
index e152841553..3bd776cf1e 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -6,7 +6,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Check Spelling uses: rojopolis/spellcheck-github-actions@0.33.1 with: From e7cc3bbace03437d7db9b6a5df023e09ef8ae4b6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Oct 2023 09:41:17 +0300 Subject: [PATCH 02/48] Bump rojopolis/spellcheck-github-actions from 0.33.1 to 0.34.0 (#2970) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/spellcheck.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml index 3bd776cf1e..46c629b2cc 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -8,7 +8,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - name: Check Spelling - uses: rojopolis/spellcheck-github-actions@0.33.1 + uses: rojopolis/spellcheck-github-actions@0.34.0 with: config_path: .github/spellcheck-settings.yml task_name: Markdown From 0172deda2cb148bc67e332d3ccee36af98696abb Mon Sep 17 00:00:00 2001 From: Artem Diubkov Date: Tue, 10 Oct 2023 02:35:36 +0300 Subject: [PATCH 03/48] fix type hint (#2963) Co-authored-by: d184230 --- redis/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/redis/client.py b/redis/client.py index 4923143543..b526d7787b 100755 --- a/redis/client.py +++ b/redis/client.py @@ -94,7 +94,7 @@ class Redis(RedisModuleCommands, CoreCommands, SentinelCommands): """ @classmethod - def from_url(cls, url: str, **kwargs) -> None: + def from_url(cls, url: str, **kwargs) -> "Redis": """ Return a Redis client object configured from the given URL From 565171cc29ba7fa77adca3c888484a08b3986f99 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristj=C3=A1n=20Valur=20J=C3=B3nsson?= Date: Wed, 11 Oct 2023 12:41:22 +0000 Subject: [PATCH 04/48] Don't perform blocking connect inside the BlockingConnectionQueue Condition variable. 
(#2997) --- redis/asyncio/connection.py | 34 +++++++++++++++++++++++----------- 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/redis/asyncio/connection.py b/redis/asyncio/connection.py index 65fa58643b..1ef9960ff3 100644 --- a/redis/asyncio/connection.py +++ b/redis/asyncio/connection.py @@ -1027,7 +1027,18 @@ def can_get_connection(self) -> bool: ) async def get_connection(self, command_name, *keys, **options): - """Get a connection from the pool""" + """Get a connected connection from the pool""" + connection = self.get_available_connection() + try: + await self.ensure_connection(connection) + except BaseException: + await self.release(connection) + raise + + return connection + + def get_available_connection(self): + """Get a connection from the pool, without making sure it is connected""" try: connection = self._available_connections.pop() except IndexError: @@ -1035,13 +1046,6 @@ def get_available_connection(self): raise ConnectionError("Too many connections") from None connection = self.make_connection() self._in_use_connections.add(connection) - - try: - await self.ensure_connection(connection) - except BaseException: - await self.release(connection) - raise - return connection def get_encoder(self): @@ -1166,13 +1170,21 @@ def __init__( async def get_connection(self, command_name, *keys, **options): """Gets a connection from the pool, blocking until one is available""" try: - async with async_timeout(self.timeout): - async with self._condition: + async with self._condition: + async with async_timeout(self.timeout): await self._condition.wait_for(self.can_get_connection) - return await super().get_connection(command_name, *keys, **options) + connection = super().get_available_connection() except asyncio.TimeoutError as err: raise ConnectionError("No connection available.") from err + # We now perform the connection check outside of the lock. + try: + await self.ensure_connection(connection) + return connection + except BaseException: + await self.release(connection) + raise + async def release(self, connection: AbstractConnection): """Releases the connection back to the pool.""" async with self._condition: From 697da37dd7a8e47bfc4bba4e98c12f4a063fa668 Mon Sep 17 00:00:00 2001 From: Chayim Date: Wed, 11 Oct 2023 15:45:01 +0300 Subject: [PATCH 05/48] Creating CODEOWNERS for the examples (#2993) --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000000..1af2323fe9 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +doctests/* @dmaier-redislabs From 9970fb4675185273103621cc4b38576ee5a7ce12 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristj=C3=A1n=20Valur=20J=C3=B3nsson?= Date: Mon, 16 Oct 2023 10:36:58 +0000 Subject: [PATCH 06/48] Close various objects created during asyncio tests (#3005) * Close various objects created during asyncio tests * Fix resource leak in test_cwe_404.py Need to wait for individual handler tasks when shutting down server.
--- tests/test_asyncio/test_commands.py | 2 ++ tests/test_asyncio/test_connect.py | 2 ++ tests/test_asyncio/test_connection.py | 4 ++++ tests/test_asyncio/test_cwe_404.py | 15 +++++++++---- tests/test_asyncio/test_retry.py | 3 +++ tests/test_asyncio/test_sentinel.py | 22 +++++++++---------- .../test_sentinel_managed_connection.py | 1 + 7 files changed, 34 insertions(+), 15 deletions(-) diff --git a/tests/test_asyncio/test_commands.py b/tests/test_asyncio/test_commands.py index 9b9852e9ef..35b9f2a29f 100644 --- a/tests/test_asyncio/test_commands.py +++ b/tests/test_asyncio/test_commands.py @@ -370,10 +370,12 @@ async def test_client_setinfo(self, r: redis.Redis): info = await r2.client_info() assert info["lib-name"] == "test2" assert info["lib-ver"] == "1234" + await r2.aclose() r3 = redis.asyncio.Redis(lib_name=None, lib_version=None) info = await r3.client_info() assert info["lib-name"] == "" assert info["lib-ver"] == "" + await r3.aclose() @skip_if_server_version_lt("2.6.9") @pytest.mark.onlynoncluster diff --git a/tests/test_asyncio/test_connect.py b/tests/test_asyncio/test_connect.py index 0b2d7c2afa..5e6b120fb3 100644 --- a/tests/test_asyncio/test_connect.py +++ b/tests/test_asyncio/test_connect.py @@ -73,6 +73,8 @@ async def _handler(reader, writer): try: return await _redis_request_handler(reader, writer, stop_event) finally: + writer.close() + await writer.wait_closed() finished.set() if isinstance(server_address, str): diff --git a/tests/test_asyncio/test_connection.py b/tests/test_asyncio/test_connection.py index 28e6b0d9c3..9c7f25bf87 100644 --- a/tests/test_asyncio/test_connection.py +++ b/tests/test_asyncio/test_connection.py @@ -85,6 +85,8 @@ async def get_conn(_): assert init_call_count == 1 assert command_call_count == 2 + r.connection = None # it was a Mock + await r.aclose() @skip_if_server_version_lt("4.0.0") @@ -143,6 +145,7 @@ async def mock_connect(): conn._connect.side_effect = mock_connect await conn.connect() assert conn._connect.call_count == 3 + await conn.disconnect() async def test_connect_without_retry_on_os_error(): @@ -194,6 +197,7 @@ async def test_connection_parse_response_resume(r: redis.Redis): pytest.fail("didn't receive a response") assert response assert i > 0 + await conn.disconnect() @pytest.mark.onlynoncluster diff --git a/tests/test_asyncio/test_cwe_404.py b/tests/test_asyncio/test_cwe_404.py index 17ed6822ac..bb9f1780ac 100644 --- a/tests/test_asyncio/test_cwe_404.py +++ b/tests/test_asyncio/test_cwe_404.py @@ -15,6 +15,8 @@ def __init__(self, addr, redis_addr, delay: float = 0.0): self.send_event = asyncio.Event() self.server = None self.task = None + self.cond = asyncio.Condition() + self.running = 0 async def __aenter__(self): await self.start() @@ -63,10 +65,10 @@ async def stop(self): except asyncio.CancelledError: pass await self.server.wait_closed() - # do we need to close individual connections too? - # prudently close all async generators - loop = self.server.get_loop() - await loop.shutdown_asyncgens() + # Server does not wait for all spawned tasks. We must do that also to ensure + # that all sockets are closed. + async with self.cond: + await self.cond.wait_for(lambda: self.running == 0) async def pipe( self, @@ -75,6 +77,7 @@ async def pipe( name="", event: asyncio.Event = None, ): + self.running += 1 try: while True: data = await reader.read(1000) @@ -94,6 +97,10 @@ async def pipe( # ignore errors on close pertaining to no event loop. 
Don't want # to clutter the test output with errors if being garbage collected pass + async with self.cond: + self.running -= 1 + if self.running == 0: + self.cond.notify_all() @pytest.mark.onlynoncluster diff --git a/tests/test_asyncio/test_retry.py b/tests/test_asyncio/test_retry.py index 2912ca786c..8bc71c1479 100644 --- a/tests/test_asyncio/test_retry.py +++ b/tests/test_asyncio/test_retry.py @@ -131,5 +131,8 @@ async def test_get_set_retry_object(self, request): assert r.get_retry()._retries == new_retry_policy._retries assert isinstance(r.get_retry()._backoff, ExponentialBackoff) assert exiting_conn.retry._retries == new_retry_policy._retries + await r.connection_pool.release(exiting_conn) new_conn = await r.connection_pool.get_connection("_") assert new_conn.retry._retries == new_retry_policy._retries + await r.connection_pool.release(new_conn) + await r.aclose() diff --git a/tests/test_asyncio/test_sentinel.py b/tests/test_asyncio/test_sentinel.py index 25bd7730da..51e59d69d0 100644 --- a/tests/test_asyncio/test_sentinel.py +++ b/tests/test_asyncio/test_sentinel.py @@ -183,13 +183,13 @@ async def test_discover_slaves(cluster, sentinel): @pytest.mark.onlynoncluster async def test_master_for(cluster, sentinel, master_ip): - master = sentinel.master_for("mymaster", db=9) - assert await master.ping() - assert master.connection_pool.master_address == (master_ip, 6379) + async with sentinel.master_for("mymaster", db=9) as master: + assert await master.ping() + assert master.connection_pool.master_address == (master_ip, 6379) # Use internal connection check - master = sentinel.master_for("mymaster", db=9, check_connection=True) - assert await master.ping() + async with sentinel.master_for("mymaster", db=9, check_connection=True) as master: + assert await master.ping() @pytest.mark.onlynoncluster @@ -197,16 +197,16 @@ async def test_slave_for(cluster, sentinel): cluster.slaves = [ {"ip": "127.0.0.1", "port": 6379, "is_odown": False, "is_sdown": False} ] - slave = sentinel.slave_for("mymaster", db=9) - assert await slave.ping() + async with sentinel.slave_for("mymaster", db=9) as slave: + assert await slave.ping() @pytest.mark.onlynoncluster async def test_slave_for_slave_not_found_error(cluster, sentinel): cluster.master["is_odown"] = True - slave = sentinel.slave_for("mymaster", db=9) - with pytest.raises(SlaveNotFoundError): - await slave.ping() + async with sentinel.slave_for("mymaster", db=9) as slave: + with pytest.raises(SlaveNotFoundError): + await slave.ping() @pytest.mark.onlynoncluster @@ -260,7 +260,7 @@ async def mock_disconnect(): calls += 1 with mock.patch.object(pool, "disconnect", mock_disconnect): - await client.close() + await client.aclose() assert calls == 1 await pool.disconnect() diff --git a/tests/test_asyncio/test_sentinel_managed_connection.py b/tests/test_asyncio/test_sentinel_managed_connection.py index 711b3ee733..cae4b9581f 100644 --- a/tests/test_asyncio/test_sentinel_managed_connection.py +++ b/tests/test_asyncio/test_sentinel_managed_connection.py @@ -34,3 +34,4 @@ async def mock_connect(): conn._connect.side_effect = mock_connect await conn.connect() assert conn._connect.call_count == 3 + await conn.disconnect() From 194d7100bd6bd279c6b6feda5b11b222402f0f5e Mon Sep 17 00:00:00 2001 From: Chayim Date: Mon, 16 Oct 2023 14:18:34 +0300 Subject: [PATCH 07/48] Linking to Redis resources (#3006) --- README.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/README.md b/README.md index 4bd71b4060..2097e87bba 100644 --- a/README.md +++ b/README.md 
@@ -17,6 +17,20 @@ The Python interface to the Redis key-value store. --------------------------------------------- +## How do I Redis? + +[Learn for free at Redis University](https://university.redis.com/) + +[Build faster with the Redis Launchpad](https://launchpad.redis.com/) + +[Try the Redis Cloud](https://redis.com/try-free/) + +[Dive in developer tutorials](https://developer.redis.com/) + +[Join the Redis community](https://redis.com/community/) + +[Work at Redis](https://redis.com/company/careers/jobs/) + ## Installation Start a redis via docker: From b5e8e55eeb4e081083ae08a8b91ce19ef4d89ee6 Mon Sep 17 00:00:00 2001 From: Shaya Potter Date: Mon, 16 Oct 2023 14:18:50 +0300 Subject: [PATCH 08/48] Add GEOSHAPE field type for index creation of RediSearch (#2957) * first pass of geoshape index type * first attempt at test, but demonstrates the initial commit is broken * fix new field + fix tests * work on linter * more linter * try to mark test with correct fixture * fix linter --- redis/commands/search/field.py | 16 ++++++++++++++++ tests/test_search.py | 18 ++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/redis/commands/search/field.py b/redis/commands/search/field.py index 76eb58c2d7..f316ed9f14 100644 --- a/redis/commands/search/field.py +++ b/redis/commands/search/field.py @@ -13,6 +13,7 @@ class Field: SORTABLE = "SORTABLE" NOINDEX = "NOINDEX" AS = "AS" + GEOSHAPE = "GEOSHAPE" def __init__( self, @@ -91,6 +92,21 @@ def __init__(self, name: str, **kwargs): Field.__init__(self, name, args=[Field.NUMERIC], **kwargs) +class GeoShapeField(Field): + """ + GeoShapeField is used to enable within/contain indexing/searching + """ + + SPHERICAL = "SPHERICAL" + FLAT = "FLAT" + + def __init__(self, name: str, coord_system=None, **kwargs): + args = [Field.GEOSHAPE] + if coord_system: + args.append(coord_system) + Field.__init__(self, name, args=args, **kwargs) + + class GeoField(Field): """ GeoField is used to define a geo-indexing field in a schema definition diff --git a/tests/test_search.py b/tests/test_search.py index 9bbfc3c696..7469123453 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -13,6 +13,7 @@ from redis.commands.search import Search from redis.commands.search.field import ( GeoField, + GeoShapeField, NumericField, TagField, TextField, @@ -2266,3 +2267,20 @@ def test_query_timeout(r: redis.Redis): q2 = Query("foo").timeout("not_a_number") with pytest.raises(redis.ResponseError): r.ft().search(q2) + + +@pytest.mark.redismod +def test_geoshape(client: redis.Redis): + client.ft().create_index((GeoShapeField("geom", GeoShapeField.FLAT))) + waitForIndex(client, getattr(client.ft(), "index_name", "idx")) + client.hset("small", "geom", "POLYGON((1 1, 1 100, 100 100, 100 1, 1 1))") + client.hset("large", "geom", "POLYGON((1 1, 1 200, 200 200, 200 1, 1 1))") + q1 = Query("@geom:[WITHIN $poly]").dialect(3) + qp1 = {"poly": "POLYGON((0 0, 0 150, 150 150, 150 0, 0 0))"} + q2 = Query("@geom:[CONTAINS $poly]").dialect(3) + qp2 = {"poly": "POLYGON((2 2, 2 50, 50 50, 50 2, 2 2))"} + result = client.ft().search(q1, query_params=qp1) + assert len(result.docs) == 1 + assert result.docs[0]["id"] == "small" + result = client.ft().search(q2, query_params=qp2) + assert len(result.docs) == 2 From 30c1686020191d1328bb5701eb31fa78659778d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristj=C3=A1n=20Valur=20J=C3=B3nsson?= Date: Mon, 16 Oct 2023 14:16:17 +0000 Subject: [PATCH 09/48] Better deal with "lost" connections for async Redis (#2999) * Allow tracking/reporting and closing 
of "lost" connections. ConnectionPool keeps a WeakSet of in_use connections, allowing lost ones to be collected. Collection produces a warning and closes the underlying transport. * Add tests for the __del__ handlers of async Redis and Connection objects * capture expected warnings in the test * lint --- redis/asyncio/client.py | 1 + redis/asyncio/connection.py | 21 ++++++++++- tests/test_asyncio/test_connection.py | 52 ++++++++++++++++++++++++++- 3 files changed, 72 insertions(+), 2 deletions(-) diff --git a/redis/asyncio/client.py b/redis/asyncio/client.py index e4d2e776bc..acc89941f2 100644 --- a/redis/asyncio/client.py +++ b/redis/asyncio/client.py @@ -546,6 +546,7 @@ def __del__( _grl().call_exception_handler(context) except RuntimeError: pass + self.connection._close() async def aclose(self, close_connection_pool: Optional[bool] = None) -> None: """ diff --git a/redis/asyncio/connection.py b/redis/asyncio/connection.py index 1ef9960ff3..7b0443454b 100644 --- a/redis/asyncio/connection.py +++ b/redis/asyncio/connection.py @@ -5,6 +5,7 @@ import socket import ssl import sys +import warnings import weakref from abc import abstractmethod from itertools import chain @@ -204,6 +205,24 @@ def __init__( raise ConnectionError("protocol must be either 2 or 3") self.protocol = protocol + def __del__(self, _warnings: Any = warnings): + # For some reason, the individual streams don't get properly garbage + # collected and therefore produce no resource warnings. We add one + # here, in the same style as those from the stdlib. + if getattr(self, "_writer", None): + _warnings.warn( + f"unclosed Connection {self!r}", ResourceWarning, source=self + ) + self._close() + + def _close(self): + """ + Internal method to silently close the connection without waiting + """ + if self._writer: + self._writer.close() + self._writer = self._reader = None + def __repr__(self): repr_args = ",".join((f"{k}={v}" for k, v in self.repr_pieces())) return f"{self.__class__.__name__}<{repr_args}>" @@ -1017,7 +1036,7 @@ def __repr__(self): def reset(self): self._available_connections = [] - self._in_use_connections = set() + self._in_use_connections = weakref.WeakSet() def can_get_connection(self) -> bool: """Return True if a connection can be retrieved from the pool.""" diff --git a/tests/test_asyncio/test_connection.py b/tests/test_asyncio/test_connection.py index 9c7f25bf87..55a1c3a2f6 100644 --- a/tests/test_asyncio/test_connection.py +++ b/tests/test_asyncio/test_connection.py @@ -320,7 +320,8 @@ async def mock_aclose(self): url: str = request.config.getoption("--redis-url") r1 = await Redis.from_url(url) with patch.object(r1, "aclose", mock_aclose): - await r1.close() + with pytest.deprecated_call(): + await r1.close() assert calls == 1 with pytest.deprecated_call(): @@ -440,3 +441,52 @@ async def mock_disconnect(_): assert called == 0 await pool.disconnect() + + +async def test_client_garbage_collection(request): + """ + Test that a Redis client will call _close() on any + connection that it holds at time of destruction + """ + + url: str = request.config.getoption("--redis-url") + pool = ConnectionPool.from_url(url) + + # create a client with a connection from the pool + client = Redis(connection_pool=pool, single_connection_client=True) + await client.initialize() + with mock.patch.object(client, "connection") as a: + # we cannot, in unittests, or from asyncio, reliably trigger garbage collection + # so we must just invoke the handler + with pytest.warns(ResourceWarning): + client.__del__() + assert 
a._close.called + + await client.aclose() + await pool.aclose() + + +async def test_connection_garbage_collection(request): + """ + Test that a Connection object will call close() on the + stream that it holds. + """ + + url: str = request.config.getoption("--redis-url") + pool = ConnectionPool.from_url(url) + + # create a client with a connection from the pool + client = Redis(connection_pool=pool, single_connection_client=True) + await client.initialize() + conn = client.connection + + with mock.patch.object(conn, "_reader"): + with mock.patch.object(conn, "_writer") as a: + # we cannot, in unittests, or from asyncio, reliably trigger + # garbage collection so we must just invoke the handler + with pytest.warns(ResourceWarning): + conn.__del__() + assert a.close.called + + await client.aclose() + await pool.aclose() From 63239fa7cd7377a7fb611e9dc22c53f03547a311 Mon Sep 17 00:00:00 2001 From: "Bosheng (Daniel) Zhang" <740807262@qq.com> Date: Tue, 17 Oct 2023 00:16:37 +1000 Subject: [PATCH 10/48] Update client.py sleep_time typing for run_in_thread function (#2977) Changed from `sleep_time: int = 0` to `sleep_time: float = 0.0` To avoid Pylance complaining: `Argument of type "float" cannot be assigned to parameter "sleep_time" of type "int" in function "run_in_thread" "float" is incompatible with "int"` --- redis/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/redis/client.py b/redis/client.py index b526d7787b..cb91c7a088 100755 --- a/redis/client.py +++ b/redis/client.py @@ -1100,7 +1100,7 @@ def handle_message(self, response, ignore_subscribe_messages=False): def run_in_thread( self, - sleep_time: int = 0, + sleep_time: float = 0.0, daemon: bool = False, exception_handler: Optional[Callable] = None, ) -> "PubSubWorkerThread": From e2127e4a9a38e4e446fd05282ebd0b34b40f8ebd Mon Sep 17 00:00:00 2001 From: Romain Date: Mon, 13 Nov 2023 02:41:55 +0100 Subject: [PATCH 11/48] Fix BlockingConnectionPool.from_url parsing of timeout in query args #2983 (#2984) Co-authored-by: Romain Fliedel --- redis/asyncio/connection.py | 1 + redis/connection.py | 1 + tests/test_asyncio/test_connection_pool.py | 25 ++++++++++++++++++++++ tests/test_connection_pool.py | 25 ++++++++++++++++++++++ 4 files changed, 52 insertions(+) diff --git a/redis/asyncio/connection.py b/redis/asyncio/connection.py index 7b0443454b..77312211a9 100644 --- a/redis/asyncio/connection.py +++ b/redis/asyncio/connection.py @@ -880,6 +880,7 @@ def to_bool(value) -> Optional[bool]: "max_connections": int, "health_check_interval": int, "ssl_check_hostname": to_bool, + "timeout": float, } ) diff --git a/redis/connection.py b/redis/connection.py index b39ba28f76..fead6135e0 100644 --- a/redis/connection.py +++ b/redis/connection.py @@ -853,6 +853,7 @@ def to_bool(value): "max_connections": int, "health_check_interval": int, "ssl_check_hostname": to_bool, + "timeout": float, } diff --git a/tests/test_asyncio/test_connection_pool.py b/tests/test_asyncio/test_connection_pool.py index c93fa91a39..ed90fc73fc 100644 --- a/tests/test_asyncio/test_connection_pool.py +++ b/tests/test_asyncio/test_connection_pool.py @@ -454,6 +454,31 @@ def test_invalid_scheme_raises_error(self): ) +class TestBlockingConnectionPoolURLParsing: + def test_extra_typed_querystring_options(self): + pool = redis.BlockingConnectionPool.from_url( + "redis://localhost/2?socket_timeout=20&socket_connect_timeout=10" + "&socket_keepalive=&retry_on_timeout=Yes&max_connections=10&timeout=13.37" + ) + + assert pool.connection_class == redis.Connection + 
assert pool.connection_kwargs == { + "host": "localhost", + "db": 2, + "socket_timeout": 20.0, + "socket_connect_timeout": 10.0, + "retry_on_timeout": True, + } + assert pool.max_connections == 10 + assert pool.timeout == 13.37 + + def test_invalid_extra_typed_querystring_options(self): + with pytest.raises(ValueError): + redis.BlockingConnectionPool.from_url( + "redis://localhost/2?timeout=_not_a_float_" + ) + + class TestConnectionPoolUnixSocketURLParsing: def test_defaults(self): pool = redis.ConnectionPool.from_url("unix:///socket") diff --git a/tests/test_connection_pool.py b/tests/test_connection_pool.py index ef70a8ff35..d1e984ee9c 100644 --- a/tests/test_connection_pool.py +++ b/tests/test_connection_pool.py @@ -359,6 +359,31 @@ def test_invalid_scheme_raises_error_when_double_slash_missing(self): ) +class TestBlockingConnectionPoolURLParsing: + def test_extra_typed_querystring_options(self): + pool = redis.BlockingConnectionPool.from_url( + "redis://localhost/2?socket_timeout=20&socket_connect_timeout=10" + "&socket_keepalive=&retry_on_timeout=Yes&max_connections=10&timeout=42" + ) + + assert pool.connection_class == redis.Connection + assert pool.connection_kwargs == { + "host": "localhost", + "db": 2, + "socket_timeout": 20.0, + "socket_connect_timeout": 10.0, + "retry_on_timeout": True, + } + assert pool.max_connections == 10 + assert pool.timeout == 42.0 + + def test_invalid_extra_typed_querystring_options(self): + with pytest.raises(ValueError): + redis.BlockingConnectionPool.from_url( + "redis://localhost/2?timeout=_not_a_float_" + ) + + class TestConnectionPoolUnixSocketURLParsing: def test_defaults(self): pool = redis.ConnectionPool.from_url("unix:///socket") From 45e858a671b3e6c4a43faf728414d725c40acfb8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristj=C3=A1n=20Valur=20J=C3=B3nsson?= Date: Mon, 13 Nov 2023 01:43:57 +0000 Subject: [PATCH 12/48] Fix parsing resp3 dicts (#2982) --- redis/_parsers/resp3.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/redis/_parsers/resp3.py b/redis/_parsers/resp3.py index ad766a8f95..569e7ee679 100644 --- a/redis/_parsers/resp3.py +++ b/redis/_parsers/resp3.py @@ -96,8 +96,9 @@ def _read_response(self, disable_decoding=False, push_request=False): pass # map response elif byte == b"%": - # we use this approach and not dict comprehension here - # because this dict comprehension fails in python 3.7 + # We cannot use a dict-comprehension to parse stream. + # Evaluation order of key:val expression in dict comprehension only + # became defined to be left-right in version 3.8 resp_dict = {} for _ in range(int(response)): key = self._read_response(disable_decoding=disable_decoding) @@ -225,12 +226,16 @@ async def _read_response( pass # map response elif byte == b"%": - response = { - (await self._read_response(disable_decoding=disable_decoding)): ( - await self._read_response(disable_decoding=disable_decoding) + # We cannot use a dict-comprehension to parse stream. 
+ # Evaluation order of key:val expression in dict comprehension only + # became defined to be left-right in version 3.8 + resp_dict = {} + for _ in range(int(response)): + key = await self._read_response(disable_decoding=disable_decoding) + resp_dict[key] = await self._read_response( + disable_decoding=disable_decoding, push_request=push_request ) - for _ in range(int(response)) - } + response = resp_dict # push response elif byte == b">": response = [ From 63b562f5cb744e5705bb9d501f2d2026e721eea3 Mon Sep 17 00:00:00 2001 From: Aniket Patil <128228805+AniketP04@users.noreply.github.com> Date: Wed, 29 Nov 2023 06:01:50 +0530 Subject: [PATCH 13/48] Update ocsp.py (#3022) --- redis/ocsp.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/redis/ocsp.py b/redis/ocsp.py index b0420b4711..8819848fa9 100644 --- a/redis/ocsp.py +++ b/redis/ocsp.py @@ -61,7 +61,7 @@ def _check_certificate(issuer_cert, ocsp_bytes, validate=True): ) else: raise ConnectionError( - "failed to retrieve a sucessful response from the ocsp responder" + "failed to retrieve a successful response from the ocsp responder" ) if ocsp_response.this_update >= datetime.datetime.now(): @@ -139,7 +139,7 @@ def _get_pubkey_hash(certificate): def ocsp_staple_verifier(con, ocsp_bytes, expected=None): - """An implemention of a function for set_ocsp_client_callback in PyOpenSSL. + """An implementation of a function for set_ocsp_client_callback in PyOpenSSL. This function validates that the provide ocsp_bytes response is valid, and matches the expected, stapled responses. @@ -266,7 +266,7 @@ def build_certificate_url(self, server, cert, issuer_cert): return url def check_certificate(self, server, cert, issuer_url): - """Checks the validitity of an ocsp server for an issuer""" + """Checks the validity of an ocsp server for an issuer""" r = requests.get(issuer_url) if not r.ok: From a9306e3ea25eca9cfbfaa85bc92e784bd074359e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 3 Dec 2023 02:40:49 +0200 Subject: [PATCH 14/48] Bump rojopolis/spellcheck-github-actions from 0.34.0 to 0.35.0 (#3060) Bumps [rojopolis/spellcheck-github-actions](https://github.com/rojopolis/spellcheck-github-actions) from 0.34.0 to 0.35.0. - [Release notes](https://github.com/rojopolis/spellcheck-github-actions/releases) - [Changelog](https://github.com/rojopolis/spellcheck-github-actions/blob/master/CHANGELOG.md) - [Commits](https://github.com/rojopolis/spellcheck-github-actions/compare/0.34.0...0.35.0) --- updated-dependencies: - dependency-name: rojopolis/spellcheck-github-actions dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/spellcheck.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml index 46c629b2cc..a48781aa84 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -8,7 +8,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - name: Check Spelling - uses: rojopolis/spellcheck-github-actions@0.34.0 + uses: rojopolis/spellcheck-github-actions@0.35.0 with: config_path: .github/spellcheck-settings.yml task_name: Markdown From 9402c3076871ac06c332181aa0b3cd5981de69d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stanis=C5=82aw=20Denkowski?= Date: Sun, 3 Dec 2023 01:45:47 +0100 Subject: [PATCH 15/48] Use `disable_decoding` in async `read_response`. (#3042) --- CHANGES | 1 + redis/_parsers/hiredis.py | 10 ++++++++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGES b/CHANGES index 8cfc47db18..ef983e4b51 100644 --- a/CHANGES +++ b/CHANGES @@ -1,3 +1,4 @@ + * Fix async `read_response` to use `disable_decoding`. * Add 'aclose()' methods to async classes, deprecate async close(). * Fix #2831, add auto_close_connection_pool=True arg to asyncio.Redis.from_url() * Fix incorrect redis.asyncio.Cluster type hint for `retry_on_error` diff --git a/redis/_parsers/hiredis.py b/redis/_parsers/hiredis.py index b3247b71ec..1919d3658e 100644 --- a/redis/_parsers/hiredis.py +++ b/redis/_parsers/hiredis.py @@ -198,10 +198,16 @@ async def read_response( if not self._connected: raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) from None - response = self._reader.gets() + if disable_decoding: + response = self._reader.gets(False) + else: + response = self._reader.gets() while response is False: await self.read_from_socket() - response = self._reader.gets() + if disable_decoding: + response = self._reader.gets(False) + else: + response = self._reader.gets() # if the response is a ConnectionError or the response is a list and # the first item is a ConnectionError, raise it as something bad From ab82697469db83e838c83d7b7f29c3bee11171b9 Mon Sep 17 00:00:00 2001 From: Pedram Parsian Date: Sun, 3 Dec 2023 04:20:52 +0330 Subject: [PATCH 16/48] Add "sum" to DUPLICATE_POLICY documentation of TS.CREATE, TS.ADD and TS.ALTER (#3027) --- CHANGES | 1 + redis/commands/timeseries/commands.py | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/CHANGES b/CHANGES index ef983e4b51..191286d4af 100644 --- a/CHANGES +++ b/CHANGES @@ -56,6 +56,7 @@ * Fix for Unhandled exception related to self.host with unix socket (#2496) * Improve error output for master discovery * Make `ClusterCommandsProtocol` an actual Protocol + * Add `sum` to DUPLICATE_POLICY documentation of `TS.CREATE`, `TS.ADD` and `TS.ALTER` * 4.1.3 (Feb 8, 2022) * Fix flushdb and flushall (#1926) diff --git a/redis/commands/timeseries/commands.py b/redis/commands/timeseries/commands.py index 13e3cdf498..ad137f2df0 100644 --- a/redis/commands/timeseries/commands.py +++ b/redis/commands/timeseries/commands.py @@ -59,6 +59,9 @@ def create( - 'last': override with latest value. - 'min': only override if the value is lower than the existing value. - 'max': only override if the value is higher than the existing value. + - 'sum': If a previous sample exists, add the new sample to it so that \ + the updated value is equal to (previous + new). If no previous sample \ + exists, set the updated value equal to the new value. 
For more information: https://redis.io/commands/ts.create/ """ # noqa @@ -103,6 +106,9 @@ def alter( - 'last': override with latest value. - 'min': only override if the value is lower than the existing value. - 'max': only override if the value is higher than the existing value. + - 'sum': If a previous sample exists, add the new sample to it so that \ + the updated value is equal to (previous + new). If no previous sample \ + exists, set the updated value equal to the new value. For more information: https://redis.io/commands/ts.alter/ """ # noqa @@ -154,6 +160,9 @@ def add( - 'last': override with latest value. - 'min': only override if the value is lower than the existing value. - 'max': only override if the value is higher than the existing value. + - 'sum': If a previous sample exists, add the new sample to it so that \ + the updated value is equal to (previous + new). If no previous sample \ + exists, set the updated value equal to the new value. For more information: https://redis.io/commands/ts.add/ """ # noqa From f2fa734b505fdbab0910780550a2d9fa6a1b30a8 Mon Sep 17 00:00:00 2001 From: BackflipPenguin <63213817+BackflipPenguin@users.noreply.github.com> Date: Sat, 2 Dec 2023 21:51:27 -0300 Subject: [PATCH 17/48] Update advanced_features.rst (#3019) --- docs/advanced_features.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/advanced_features.rst b/docs/advanced_features.rst index fd29d2f684..de645bd764 100644 --- a/docs/advanced_features.rst +++ b/docs/advanced_features.rst @@ -346,7 +346,7 @@ running. The third option runs an event loop in a separate thread. pubsub.run_in_thread() creates a new thread and starts the event loop. -The thread object is returned to the caller of [un_in_thread(). The +The thread object is returned to the caller of run_in_thread(). The caller can use the thread.stop() method to shut down the event loop and thread. Behind the scenes, this is simply a wrapper around get_message() that runs in a separate thread, essentially creating a tiny non-blocking From c1a881edf23937c29183c97311d43cc8a251fd5e Mon Sep 17 00:00:00 2001 From: AYMEN Mohammed <53928879+AYMENJD@users.noreply.github.com> Date: Sun, 3 Dec 2023 03:53:30 +0300 Subject: [PATCH 18/48] Fix typos. (#3016) --- docs/conf.py | 2 +- docs/examples/pipeline_examples.ipynb | 2 +- redis/cluster.py | 2 +- redis/commands/cluster.py | 6 +++--- tests/test_asyncio/test_cwe_404.py | 2 +- tests/test_asyncio/test_search.py | 2 +- tests/test_graph.py | 2 +- tests/test_graph_utils/test_edge.py | 2 +- tests/test_graph_utils/test_node.py | 2 +- tests/test_search.py | 4 ++-- 10 files changed, 13 insertions(+), 13 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 8849752404..a201da2fc0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -86,7 +86,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build", "**.ipynb_checkponts"] +exclude_patterns = ["_build", "**.ipynb_checkpoints"] # The reST default role (used for this markup: `text`) to use for all # documents. diff --git a/docs/examples/pipeline_examples.ipynb b/docs/examples/pipeline_examples.ipynb index 4e20375bfa..36ce31d708 100644 --- a/docs/examples/pipeline_examples.ipynb +++ b/docs/examples/pipeline_examples.ipynb @@ -123,7 +123,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The responses of the three commands are stored in a list. 
In the above example, the two first boolean indicates that the `set` commands were successfull and the last element of the list is the result of the `get(\"a\")` comand." + "The responses of the three commands are stored in a list. In the above example, the two first boolean indicates that the `set` commands were successful and the last element of the list is the result of the `get(\"a\")` comand." ] }, { diff --git a/redis/cluster.py b/redis/cluster.py index 873d586c4a..66857ccac2 100644 --- a/redis/cluster.py +++ b/redis/cluster.py @@ -2196,7 +2196,7 @@ def _send_cluster_commands( ) if attempt and allow_redirections: # RETRY MAGIC HAPPENS HERE! - # send these remaing commands one at a time using `execute_command` + # send these remaining commands one at a time using `execute_command` # in the main client. This keeps our retry logic # in one place mostly, # and allows us to be more confident in correctness of behavior. diff --git a/redis/commands/cluster.py b/redis/commands/cluster.py index 14b8741443..eff2059948 100644 --- a/redis/commands/cluster.py +++ b/redis/commands/cluster.py @@ -225,7 +225,7 @@ def delete(self, *keys: KeyT) -> ResponseT: The keys are first split up into slots and then an DEL command is sent for every slot - Non-existant keys are ignored. + Non-existent keys are ignored. Returns the number of keys that were deleted. For more information see https://redis.io/commands/del @@ -240,7 +240,7 @@ def touch(self, *keys: KeyT) -> ResponseT: The keys are first split up into slots and then an TOUCH command is sent for every slot - Non-existant keys are ignored. + Non-existent keys are ignored. Returns the number of keys that were touched. For more information see https://redis.io/commands/touch @@ -254,7 +254,7 @@ def unlink(self, *keys: KeyT) -> ResponseT: The keys are first split up into slots and then an TOUCH command is sent for every slot - Non-existant keys are ignored. + Non-existent keys are ignored. Returns the number of keys that were unlinked. For more information see https://redis.io/commands/unlink diff --git a/tests/test_asyncio/test_cwe_404.py b/tests/test_asyncio/test_cwe_404.py index bb9f1780ac..df46cabc43 100644 --- a/tests/test_asyncio/test_cwe_404.py +++ b/tests/test_asyncio/test_cwe_404.py @@ -253,7 +253,7 @@ async def op(r): with pytest.raises(asyncio.CancelledError): await t - # try a number of requests to excercise all the connections + # try a number of requests to exercise all the connections async def doit(): assert await r.get("bar") == b"bar" assert await r.ping() diff --git a/tests/test_asyncio/test_search.py b/tests/test_asyncio/test_search.py index efc5bf549c..1f1931e28a 100644 --- a/tests/test_asyncio/test_search.py +++ b/tests/test_asyncio/test_search.py @@ -1503,7 +1503,7 @@ async def test_withsuffixtrie(decoded_r: redis.Redis): assert "WITHSUFFIXTRIE" not in info["attributes"][0]["flags"] assert await decoded_r.ft().dropindex("idx") - # create withsuffixtrie index (text fiels) + # create withsuffixtrie index (text fields) assert await decoded_r.ft().create_index((TextField("t", withsuffixtrie=True))) waitForIndex(decoded_r, getattr(decoded_r.ft(), "index_name", "idx")) info = await decoded_r.ft().info() diff --git a/tests/test_graph.py b/tests/test_graph.py index 6fa9977d98..c6d128908e 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -404,7 +404,7 @@ def test_cache_sync(client): # Client B will try to get Client A out of sync by: # 1. deleting the graph # 2. 
reconstructing the graph in a different order, this will casuse - # a differance in the current mapping between string IDs and the + # a difference in the current mapping between string IDs and the # mapping Client A is aware of # # Client A should pick up on the changes by comparing graph versions diff --git a/tests/test_graph_utils/test_edge.py b/tests/test_graph_utils/test_edge.py index d2a1e3f39e..1918a6ff44 100644 --- a/tests/test_graph_utils/test_edge.py +++ b/tests/test_graph_utils/test_edge.py @@ -61,7 +61,7 @@ def test_stringify(): @pytest.mark.redismod -def test_comparision(): +def test_comparison(): node1 = node.Node(node_id=1) node2 = node.Node(node_id=2) node3 = node.Node(node_id=3) diff --git a/tests/test_graph_utils/test_node.py b/tests/test_graph_utils/test_node.py index c3b34ac6ff..22e6d59414 100644 --- a/tests/test_graph_utils/test_node.py +++ b/tests/test_graph_utils/test_node.py @@ -33,7 +33,7 @@ def test_stringify(fixture): @pytest.mark.redismod -def test_comparision(fixture): +def test_comparison(fixture): no_args, no_props, props_only, no_label, multi_label = fixture assert node.Node() == node.Node() diff --git a/tests/test_search.py b/tests/test_search.py index 7469123453..bfe204254c 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -2227,7 +2227,7 @@ def test_withsuffixtrie(client: redis.Redis): assert "WITHSUFFIXTRIE" not in info["attributes"][0] assert client.ft().dropindex("idx") - # create withsuffixtrie index (text fiels) + # create withsuffixtrie index (text fields) assert client.ft().create_index((TextField("t", withsuffixtrie=True))) waitForIndex(client, getattr(client.ft(), "index_name", "idx")) info = client.ft().info() @@ -2244,7 +2244,7 @@ def test_withsuffixtrie(client: redis.Redis): assert "WITHSUFFIXTRIE" not in info["attributes"][0]["flags"] assert client.ft().dropindex("idx") - # create withsuffixtrie index (text fiels) + # create withsuffixtrie index (text fields) assert client.ft().create_index((TextField("t", withsuffixtrie=True))) waitForIndex(client, getattr(client.ft(), "index_name", "idx")) info = client.ft().info() From 7c13191698b1161cd9eb93a21414ef0bfc02ae40 Mon Sep 17 00:00:00 2001 From: dvora-h <67596500+dvora-h@users.noreply.github.com> Date: Mon, 4 Dec 2023 13:33:42 +0200 Subject: [PATCH 19/48] Fix parsing of `FT.PROFILE` result (#3063) * Fix parsing of ft.profile result * test --- redis/commands/helpers.py | 5 +++++ tests/test_helpers.py | 2 ++ 2 files changed, 7 insertions(+) diff --git a/redis/commands/helpers.py b/redis/commands/helpers.py index 324d981d66..127141f650 100644 --- a/redis/commands/helpers.py +++ b/redis/commands/helpers.py @@ -64,6 +64,11 @@ def parse_list_to_dict(response): for i in range(0, len(response), 2): if isinstance(response[i], list): res["Child iterators"].append(parse_list_to_dict(response[i])) + try: + if isinstance(response[i + 1], list): + res["Child iterators"].append(parse_list_to_dict(response[i + 1])) + except IndexError: + pass elif isinstance(response[i + 1], list): res["Child iterators"] = [parse_list_to_dict(response[i + 1])] else: diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 57a94d2f45..66ee1c5390 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -41,6 +41,7 @@ def test_parse_to_dict(): "Child iterators", ["Type", "bar", "Time", "0.0729", "Counter", 3], ["Type", "barbar", "Time", "0.058", "Counter", 3], + ["Type", "barbarbar", "Time", "0.0234", "Counter", 3], ], ], ] @@ -49,6 +50,7 @@ def test_parse_to_dict(): "Child iterators": [ {"Counter": 
3.0, "Time": 0.0729, "Type": "bar"}, {"Counter": 3.0, "Time": 0.058, "Type": "barbar"}, + {"Counter": 3.0, "Time": 0.0234, "Type": "barbarbar"}, ], "Counter": 3.0, "Time": 0.2089, From 3f4f5e31472ecc21024a70c7881d8ca3ef390316 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristj=C3=A1n=20Valur=20J=C3=B3nsson?= Date: Mon, 11 Dec 2023 00:45:43 +0000 Subject: [PATCH 20/48] Make the connection callback methods public again, add documentation (#2980) --- CHANGES | 1 + redis/asyncio/client.py | 6 +++--- redis/asyncio/connection.py | 16 ++++++++++++++-- redis/client.py | 4 ++-- redis/cluster.py | 2 +- redis/connection.py | 16 ++++++++++++++-- 6 files changed, 35 insertions(+), 10 deletions(-) diff --git a/CHANGES b/CHANGES index 191286d4af..c6393c9655 100644 --- a/CHANGES +++ b/CHANGES @@ -1,3 +1,4 @@ + * Connection.register_connect_callback() is made public. * Fix async `read_response` to use `disable_decoding`. * Add 'aclose()' methods to async classes, deprecate async close(). * Fix #2831, add auto_close_connection_pool=True arg to asyncio.Redis.from_url() diff --git a/redis/asyncio/client.py b/redis/asyncio/client.py index acc89941f2..491e01916f 100644 --- a/redis/asyncio/client.py +++ b/redis/asyncio/client.py @@ -784,7 +784,7 @@ async def __aexit__(self, exc_type, exc_value, traceback): def __del__(self): if self.connection: - self.connection._deregister_connect_callback(self.on_connect) + self.connection.deregister_connect_callback(self.on_connect) async def aclose(self): # In case a connection property does not yet exist @@ -795,7 +795,7 @@ async def aclose(self): async with self._lock: if self.connection: await self.connection.disconnect() - self.connection._deregister_connect_callback(self.on_connect) + self.connection.deregister_connect_callback(self.on_connect) await self.connection_pool.release(self.connection) self.connection = None self.channels = {} @@ -858,7 +858,7 @@ async def connect(self): ) # register a callback that re-subscribes to any channels we # were listening to when we were disconnected - self.connection._register_connect_callback(self.on_connect) + self.connection.register_connect_callback(self.on_connect) else: await self.connection.connect() if self.push_handler_func is not None and not HIREDIS_AVAILABLE: diff --git a/redis/asyncio/connection.py b/redis/asyncio/connection.py index 77312211a9..abcd72ecc1 100644 --- a/redis/asyncio/connection.py +++ b/redis/asyncio/connection.py @@ -235,12 +235,24 @@ def repr_pieces(self): def is_connected(self): return self._reader is not None and self._writer is not None - def _register_connect_callback(self, callback): + def register_connect_callback(self, callback): + """ + Register a callback to be called when the connection is established either + initially or reconnected. This allows listeners to issue commands that + are ephemeral to the connection, for example pub/sub subscription or + key tracking. The callback must be a _method_ and will be kept as + a weak reference. + """ wm = weakref.WeakMethod(callback) if wm not in self._connect_callbacks: self._connect_callbacks.append(wm) - def _deregister_connect_callback(self, callback): + def deregister_connect_callback(self, callback): + """ + De-register a previously registered callback. It will no-longer receive + notifications on connection events. Calling this is not required when the + listener goes away, since the callbacks are kept as weak methods. 
+ """ try: self._connect_callbacks.remove(weakref.WeakMethod(callback)) except ValueError: diff --git a/redis/client.py b/redis/client.py index cb91c7a088..cca36ab493 100755 --- a/redis/client.py +++ b/redis/client.py @@ -693,7 +693,7 @@ def __del__(self) -> None: def reset(self) -> None: if self.connection: self.connection.disconnect() - self.connection._deregister_connect_callback(self.on_connect) + self.connection.deregister_connect_callback(self.on_connect) self.connection_pool.release(self.connection) self.connection = None self.health_check_response_counter = 0 @@ -751,7 +751,7 @@ def execute_command(self, *args): ) # register a callback that re-subscribes to any channels we # were listening to when we were disconnected - self.connection._register_connect_callback(self.on_connect) + self.connection.register_connect_callback(self.on_connect) if self.push_handler_func is not None and not HIREDIS_AVAILABLE: self.connection._parser.set_push_handler(self.push_handler_func) connection = self.connection diff --git a/redis/cluster.py b/redis/cluster.py index 66857ccac2..481c881684 100644 --- a/redis/cluster.py +++ b/redis/cluster.py @@ -1775,7 +1775,7 @@ def execute_command(self, *args): ) # register a callback that re-subscribes to any channels we # were listening to when we were disconnected - self.connection._register_connect_callback(self.on_connect) + self.connection.register_connect_callback(self.on_connect) if self.push_handler_func is not None and not HIREDIS_AVAILABLE: self.connection._parser.set_push_handler(self.push_handler_func) connection = self.connection diff --git a/redis/connection.py b/redis/connection.py index fead6135e0..f0f34a2970 100644 --- a/redis/connection.py +++ b/redis/connection.py @@ -237,12 +237,24 @@ def _construct_command_packer(self, packer): else: return PythonRespSerializer(self._buffer_cutoff, self.encoder.encode) - def _register_connect_callback(self, callback): + def register_connect_callback(self, callback): + """ + Register a callback to be called when the connection is established either + initially or reconnected. This allows listeners to issue commands that + are ephemeral to the connection, for example pub/sub subscription or + key tracking. The callback must be a _method_ and will be kept as + a weak reference. + """ wm = weakref.WeakMethod(callback) if wm not in self._connect_callbacks: self._connect_callbacks.append(wm) - def _deregister_connect_callback(self, callback): + def deregister_connect_callback(self, callback): + """ + De-register a previously registered callback. It will no-longer receive + notifications on connection events. Calling this is not required when the + listener goes away, since the callbacks are kept as weak methods. + """ try: self._connect_callbacks.remove(weakref.WeakMethod(callback)) except ValueError: From f29a7d7ee1575a0d37667c6fb475a1b2447aa3de Mon Sep 17 00:00:00 2001 From: Zachary Ware Date: Sun, 10 Dec 2023 18:46:00 -0600 Subject: [PATCH 21/48] Fix reported version of deprecations in asyncio.client (#2968) --- redis/asyncio/client.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/redis/asyncio/client.py b/redis/asyncio/client.py index 491e01916f..c32b8ec8a8 100644 --- a/redis/asyncio/client.py +++ b/redis/asyncio/client.py @@ -168,7 +168,7 @@ class initializer. In the case of conflicting arguments, querystring warnings.warn( DeprecationWarning( '"auto_close_connection_pool" is deprecated ' - "since version 5.0.0. " + "since version 5.0.1. 
" "Please create a ConnectionPool explicitly and " "provide to the Redis() constructor instead." ) @@ -247,7 +247,7 @@ def __init__( warnings.warn( DeprecationWarning( '"auto_close_connection_pool" is deprecated ' - "since version 5.0.0. " + "since version 5.0.1. " "Please create a ConnectionPool explicitly and " "provide to the Redis() constructor instead." ) @@ -566,7 +566,7 @@ async def aclose(self, close_connection_pool: Optional[bool] = None) -> None: ): await self.connection_pool.disconnect() - @deprecated_function(version="5.0.0", reason="Use aclose() instead", name="close") + @deprecated_function(version="5.0.1", reason="Use aclose() instead", name="close") async def close(self, close_connection_pool: Optional[bool] = None) -> None: """ Alias for aclose(), for backwards compatibility @@ -803,12 +803,12 @@ async def aclose(self): self.patterns = {} self.pending_unsubscribe_patterns = set() - @deprecated_function(version="5.0.0", reason="Use aclose() instead", name="close") + @deprecated_function(version="5.0.1", reason="Use aclose() instead", name="close") async def close(self) -> None: """Alias for aclose(), for backwards compatibility""" await self.aclose() - @deprecated_function(version="5.0.0", reason="Use aclose() instead", name="reset") + @deprecated_function(version="5.0.1", reason="Use aclose() instead", name="reset") async def reset(self) -> None: """Alias for aclose(), for backwards compatibility""" await self.aclose() From 7573448ae963e3ce541becedb0faae304ccaa46e Mon Sep 17 00:00:00 2001 From: "Tyler Bream (Event pipeline)" <97038416+tbbream@users.noreply.github.com> Date: Sun, 10 Dec 2023 19:47:43 -0500 Subject: [PATCH 22/48] Allow the parsing of the asking command to forward original options (#3012) Co-authored-by: dvora-h <67596500+dvora-h@users.noreply.github.com> --- redis/_parsers/helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/redis/_parsers/helpers.py b/redis/_parsers/helpers.py index fb5da831fe..bdd749a5bc 100644 --- a/redis/_parsers/helpers.py +++ b/redis/_parsers/helpers.py @@ -322,7 +322,7 @@ def float_or_none(response): return float(response) -def bool_ok(response): +def bool_ok(response, **options): return str_if_bytes(response) == "OK" From b7dbb7ef06eaad346c7fe0feaae342e0c2fc4ff0 Mon Sep 17 00:00:00 2001 From: Binbin Date: Tue, 12 Dec 2023 06:41:59 +0800 Subject: [PATCH 23/48] Fix Specifying Target Nodes broken hyperlink (#3072) The typo cause hyperlinks to fail. 
--- docs/clustering.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/clustering.rst b/docs/clustering.rst index 9b4dee1c9f..f8320e4e59 100644 --- a/docs/clustering.rst +++ b/docs/clustering.rst @@ -92,7 +92,7 @@ The ‘target_nodes’ parameter is explained in the following section, >>> # target-node: default-node >>> rc.ping() -Specfiying Target Nodes +Specifying Target Nodes ----------------------- As mentioned above, all non key-based RedisCluster commands accept the From ce6d7b42d8676e4e6b748f419658037f7bebee0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A1rmenas=20Haniel?= <42049025+parmenashp@users.noreply.github.com> Date: Mon, 11 Dec 2023 19:42:39 -0300 Subject: [PATCH 24/48] Fix return types in json commands (#3071) * Fix return types in JSONCommands class * Update CHANGES --- CHANGES | 1 + redis/commands/json/commands.py | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/CHANGES b/CHANGES index c6393c9655..1bc7cef600 100644 --- a/CHANGES +++ b/CHANGES @@ -1,3 +1,4 @@ + * Fix return types for `get`, `set_path` and `strappend` in JSONCommands * Connection.register_connect_callback() is made public. * Fix async `read_response` to use `disable_decoding`. * Add 'aclose()' methods to async classes, deprecate async close(). diff --git a/redis/commands/json/commands.py b/redis/commands/json/commands.py index 0f92e0d6c9..a16f07c150 100644 --- a/redis/commands/json/commands.py +++ b/redis/commands/json/commands.py @@ -173,7 +173,7 @@ def delete(self, key: str, path: Optional[str] = Path.root_path()) -> int: def get( self, name: str, *args, no_escape: Optional[bool] = False - ) -> List[JsonType]: + ) -> Optional[List[JsonType]]: """ Get the object stored as a JSON value at key ``name``. @@ -324,7 +324,7 @@ def set_path( nx: Optional[bool] = False, xx: Optional[bool] = False, decode_keys: Optional[bool] = False, - ) -> List[Dict[str, bool]]: + ) -> Dict[str, bool]: """ Iterate over ``root_folder`` and set each JSON file to a value under ``json_path`` with the file name as the key. @@ -377,7 +377,7 @@ def toggle( return self.execute_command("JSON.TOGGLE", name, str(path)) def strappend( - self, name: str, value: str, path: Optional[int] = Path.root_path() + self, name: str, value: str, path: Optional[str] = Path.root_path() ) -> Union[int, List[Optional[int]]]: """Append to the string JSON value. If two options are specified after the key name, the path is determined to be the first. If a single From 6116c3823bdbf3a4cca1f0892c84e1700f116f18 Mon Sep 17 00:00:00 2001 From: dvora-h <67596500+dvora-h@users.noreply.github.com> Date: Mon, 1 Jan 2024 13:22:15 +0200 Subject: [PATCH 25/48] fix acl_genpass with bits (#3062) --- redis/commands/core.py | 1 + tests/test_commands.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/redis/commands/core.py b/redis/commands/core.py index e73553e47e..642f3edcc5 100644 --- a/redis/commands/core.py +++ b/redis/commands/core.py @@ -99,6 +99,7 @@ def acl_genpass(self, bits: Union[int, None] = None, **kwargs) -> ResponseT: b = int(bits) if b < 0 or b > 4096: raise ValueError + pieces.append(b) except ValueError: raise DataError( "genpass optionally accepts a bits argument, between 0 and 4096." 
diff --git a/tests/test_commands.py b/tests/test_commands.py index 6660c2c6b0..b2d7c1b9ed 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -201,8 +201,9 @@ def test_acl_genpass(self, r): r.acl_genpass(-5) r.acl_genpass(5555) - r.acl_genpass(555) + password = r.acl_genpass(555) assert isinstance(password, (str, bytes)) + assert len(password) == 139 @skip_if_server_version_lt("7.0.0") @skip_if_redis_enterprise() From d9d305441023735aa005dea2ae5875eb125f8ffa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 09:07:18 +0200 Subject: [PATCH 26/48] Bump github/codeql-action from 2 to 3 (#3096) Bumps [github/codeql-action](https://github.com/github/codeql-action) from 2 to 3. - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/v2...v3) --- updated-dependencies: - dependency-name: github/codeql-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql-analysis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 61da2fce55..4670c55b0f 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -40,7 +40,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -51,7 +51,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v2 + uses: github/codeql-action/autobuild@v3 # ℹ️ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -65,4 +65,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 From 4f8c7d20b441dbcd642f016358fd58b0b831fbee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 09:07:27 +0200 Subject: [PATCH 27/48] Bump actions/upload-artifact from 3 to 4 (#3097) Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 3 to 4. - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docs.yaml | 2 +- .github/workflows/integration.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 56f16fa2b0..f32afb6a90 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -40,7 +40,7 @@ jobs: invoke build-docs - name: upload docs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: redis-py-docs path: | diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index 96b51fbafb..70a08fdcf2 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -81,7 +81,7 @@ jobs: sleep 10 # time to settle invoke ${{matrix.test-type}}-tests - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: success() || failure() with: name: pytest-results-${{matrix.test-type}}-${{matrix.connection-type}}-${{matrix.python-version}} From 8f450684990f9ae9e13dd83f77a3e5fde0a9e031 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 09:07:43 +0200 Subject: [PATCH 28/48] Bump actions/setup-python from 4 to 5 (#3095) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docs.yaml | 2 +- .github/workflows/integration.yaml | 10 +++++----- .github/workflows/pypi-publish.yaml | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index f32afb6a90..c5c74aa4d3 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -26,7 +26,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: 3.9 cache: 'pip' diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index 70a08fdcf2..adf5120341 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -41,7 +41,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: 3.9 cache: 'pip' @@ -65,7 +65,7 @@ jobs: name: Python ${{ matrix.python-version }} ${{matrix.test-type}}-${{matrix.connection-type}} tests steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -120,7 +120,7 @@ jobs: name: RESP3 [${{ matrix.python-version }} ${{matrix.test-type}}-${{matrix.connection-type}}] steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: 'pip' @@ -147,7 +147,7 @@ jobs: extension: ['tar.gz', 'whl'] steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: 3.9 - name: Run installed unit tests @@ -163,7 
+163,7 @@ jobs: python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', 'pypy-3.7', 'pypy-3.8', 'pypy-3.9'] steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: 'pip' diff --git a/.github/workflows/pypi-publish.yaml b/.github/workflows/pypi-publish.yaml index 4f8833372f..30720d7b8a 100644 --- a/.github/workflows/pypi-publish.yaml +++ b/.github/workflows/pypi-publish.yaml @@ -14,7 +14,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: install python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.9 - name: Install dev tools From 813187ec7f10b9ec8a88b4a566c29f4d1a42ae83 Mon Sep 17 00:00:00 2001 From: Chayim Date: Tue, 9 Jan 2024 13:22:09 +0200 Subject: [PATCH 29/48] Always sending codecov (#3101) --- .github/workflows/integration.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/integration.yaml b/.github/workflows/integration.yaml index adf5120341..9309208342 100644 --- a/.github/workflows/integration.yaml +++ b/.github/workflows/integration.yaml @@ -89,7 +89,6 @@ jobs: - name: Upload codecov coverage uses: codecov/codecov-action@v3 - if: ${{matrix.python-version == '3.11'}} with: fail_ci_if_error: false From 5c94a954429a5333c11f1e80d4c0fd83d91044e1 Mon Sep 17 00:00:00 2001 From: Chayim Date: Tue, 9 Jan 2024 13:43:40 +0200 Subject: [PATCH 30/48] filter commits for main branch (#3036) --- .github/release-drafter-config.yml | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/release-drafter-config.yml b/.github/release-drafter-config.yml index 9ccb28aca4..4607da071c 100644 --- a/.github/release-drafter-config.yml +++ b/.github/release-drafter-config.yml @@ -1,5 +1,7 @@ name-template: '$NEXT_MINOR_VERSION' tag-template: 'v$NEXT_MINOR_VERSION' +filter-by-commitish: true +commitish: master autolabeler: - label: 'maintenance' files: @@ -15,7 +17,7 @@ autolabeler: branch: - '/feature-.+' categories: - - title: 'Breaking Changes' + - title: '🔥 Breaking Changes' labels: - 'breakingchange' - title: '🧪 Experimental Features' @@ -32,7 +34,12 @@ categories: - 'bug' - 'BUG' - title: '🧰 Maintenance' - label: 'maintenance' + labels: + - 'maintenance' + - 'dependencies' + - 'documentation' + - 'docs' + - 'testing' change-template: '- $TITLE (#$NUMBER)' exclude-labels: - 'skip-changelog' From 7b1965aa348ba5fa2b7807b8e5f1f90a480ba5b4 Mon Sep 17 00:00:00 2001 From: "Wei-Hsiang (Matt) Wang" Date: Tue, 9 Jan 2024 19:46:08 +0800 Subject: [PATCH 31/48] fix(docs): organize cluster mode part of lua scripting (#3073) --- docs/lua_scripting.rst | 33 +++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/docs/lua_scripting.rst b/docs/lua_scripting.rst index 0edb6b6723..bd7b9bc01d 100644 --- a/docs/lua_scripting.rst +++ b/docs/lua_scripting.rst @@ -92,19 +92,24 @@ Cluster Mode Cluster mode has limited support for lua scripting. -The following commands are supported, with caveats: - ``EVAL`` and -``EVALSHA``: The command is sent to the relevant node, depending on the -keys (i.e., in ``EVAL "