diff --git a/.pylintrc b/.pylintrc
index 35aba38a..e64c7a3a 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -7,7 +7,9 @@ enable=line-too-long,
     missing-function-docstring,
     missing-param-doc,
     differing-param-doc,
-    unnecessary-dunder-call
+    unnecessary-dunder-call,
+    assignment-from-no-return,
+    unused-variable
 
 max-line-length=240
 good-names-rgxs=^[_a-z][_a-z0-9]?$
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 930003bb..ee905a92 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,9 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 
 ## [Unreleased]
 ### Added
+- Added pylint `assignment-from-no-return` and `unused-variable` ([#658](https://github.com/opensearch-project/opensearch-py/pull/658))
+- Added pylint `unnecessary-dunder-call` ([#655](https://github.com/opensearch-project/opensearch-py/pull/655))
+- Changed to use .pylintrc files in root and any directory with override requirements ([#654](https://github.com/opensearch-project/opensearch-py/pull/654))
 - Added pylint `unspecified-encoding` and `missing-function-docstring` and ignored opensearchpy for lints (([#643](https://github.com/opensearch-project/opensearch-py/pull/643)))
 - Added pylint `line-too-long` and `invalid-name` ([#590](https://github.com/opensearch-project/opensearch-py/pull/590))
 - Added pylint `pointless-statement` ([#611](https://github.com/opensearch-project/opensearch-py/pull/611))
diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py
index 88fb8698..5d0729d9 100644
--- a/benchmarks/bench_info_sync.py
+++ b/benchmarks/bench_info_sync.py
@@ -23,7 +23,7 @@ def get_info(client: Any, request_count: int) -> float:
     """get info from client"""
     total_time: float = 0
-    for request in range(request_count):
+    for _ in range(request_count):
         start = time.time() * 1000
         client.info()
         total_time += time.time() * 1000 - start
@@ -49,7 +49,7 @@ def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1) -
     root.addHandler(handler)
 
     clients = []
-    for i in range(client_count):
+    for _ in range(client_count):
         clients.append(
             OpenSearch(
                 hosts=[{"host": host, "port": port}],
diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py
index 3580e596..e9c30c53 100644
--- a/benchmarks/bench_sync.py
+++ b/benchmarks/bench_sync.py
@@ -24,7 +24,7 @@ def index_records(client: Any, index_name: str, item_count: int) -> Any:
     """bulk index item_count records into index_name"""
     total_time = 0
-    for iteration in range(10):
+    for _ in range(10):
         data: Any = []
         for item in range(item_count):
             data.append(
@@ -68,7 +68,7 @@ def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> N
     root.addHandler(handler)
 
     clients = []
-    for i in range(client_count):
+    for _ in range(client_count):
         clients.append(
             OpenSearch(
                 hosts=[{"host": host, "port": port}],
diff --git a/opensearchpy/.pylintrc b/opensearchpy/.pylintrc
index 6a1a6a95..273359fe 100644
--- a/opensearchpy/.pylintrc
+++ b/opensearchpy/.pylintrc
@@ -4,6 +4,8 @@ enable=line-too-long,
     invalid-name,
     pointless-statement,
     unspecified-encoding,
-    unnecessary-dunder-call
+    unnecessary-dunder-call,
+    assignment-from-no-return,
+    unused-variable
 max-line-length=240
 good-names-rgxs=^[_a-z][_a-z0-9]?$
\ No newline at end of file
diff --git a/opensearchpy/helpers/faceted_search.py b/opensearchpy/helpers/faceted_search.py
index 37d067c1..a97507a3 100644
--- a/opensearchpy/helpers/faceted_search.py
+++ b/opensearchpy/helpers/faceted_search.py
@@ -23,7 +23,6 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 from datetime import datetime, timedelta
 from typing import Any, Optional
 
@@ -86,10 +85,7 @@ def add_filter(self, filter_values: Any) -> Any:
         return f
 
     def get_value_filter(self, filter_value: Any) -> Any:
-        """
-        Construct a filter for an individual value
-        """
-        pass
+        return None
 
     def is_filtered(self, key: Any, filter_values: Any) -> bool:
         """
diff --git a/samples/knn/knn_async_basics.py b/samples/knn/knn_async_basics.py
index 8847f924..7b655cb4 100755
--- a/samples/knn/knn_async_basics.py
+++ b/samples/knn/knn_async_basics.py
@@ -56,7 +56,7 @@ async def main() -> None:
     vectors = []
     for i in range(10):
         vec = []
-        for j in range(dimensions):
+        for _ in range(dimensions):
             vec.append(round(random.uniform(0, 1), 2))
 
         vectors.append(
@@ -74,7 +74,7 @@ async def main() -> None:
 
     # search
     vec = []
-    for j in range(dimensions):
+    for _ in range(dimensions):
         vec.append(round(random.uniform(0, 1), 2))
 
     print(f"Searching for {vec} ...")
diff --git a/samples/knn/knn_basics.py b/samples/knn/knn_basics.py
index b3cdfca4..7bcc76b2 100755
--- a/samples/knn/knn_basics.py
+++ b/samples/knn/knn_basics.py
@@ -55,7 +55,7 @@ def main() -> None:
     vectors = []
     for i in range(10):
         vec = []
-        for j in range(dimensions):
+        for _ in range(dimensions):
             vec.append(round(random.uniform(0, 1), 2))
 
         vectors.append(
@@ -72,15 +72,12 @@ def main() -> None:
     client.indices.refresh(index=index_name)
 
     # search
-    vec = []
-    for j in range(dimensions):
-        vec.append(round(random.uniform(0, 1), 2))
+    vec = [round(random.uniform(0, 1), 2) for _ in range(dimensions)]
 
     print(f"Searching for {vec} ...")
     search_query = {"query": {"knn": {"values": {"vector": vec, "k": 3}}}}
     results = client.search(index=index_name, body=search_query)
-    for hit in results["hits"]["hits"]:
-        print(hit)
+    (print(hit) for hit in results["hits"]["hits"])
 
     # delete index
     client.indices.delete(index=index_name)
diff --git a/samples/knn/knn_boolean_filter.py b/samples/knn/knn_boolean_filter.py
index 40b5434b..c2a46b4a 100755
--- a/samples/knn/knn_boolean_filter.py
+++ b/samples/knn/knn_boolean_filter.py
@@ -56,7 +56,7 @@ def main() -> None:
     genres = ["fiction", "drama", "romance"]
     for i in range(3000):
         vec = []
-        for j in range(dimensions):
+        for _ in range(dimensions):
             vec.append(round(random.uniform(0, 1), 2))
 
         vectors.append(
@@ -76,7 +76,7 @@ def main() -> None:
     # search
     genre = random.choice(genres)
     vec = []
-    for j in range(dimensions):
+    for _ in range(dimensions):
         vec.append(round(random.uniform(0, 1), 2))
 
     print(f"Searching for {vec} with the '{genre}' genre ...")
diff --git a/test_opensearchpy/.pylintrc b/test_opensearchpy/.pylintrc
index fb981792..655458f6 100644
--- a/test_opensearchpy/.pylintrc
+++ b/test_opensearchpy/.pylintrc
@@ -6,6 +6,8 @@ enable=line-too-long,
     unspecified-encoding,
     missing-param-doc,
     differing-param-doc,
-    unnecessary-dunder-call
+    unnecessary-dunder-call,
+    assignment-from-no-return,
+    unused-variable
 max-line-length=240
 good-names-rgxs=^[_a-z][_a-z0-9]?$
\ No newline at end of file
diff --git a/test_opensearchpy/run_tests.py b/test_opensearchpy/run_tests.py
index 2aa94a76..a16393d0 100755
--- a/test_opensearchpy/run_tests.py
+++ b/test_opensearchpy/run_tests.py
@@ -94,9 +94,13 @@ def fetch_opensearch_repo() -> None:
         # if the run is interrupted from a previous run, it doesn't clean up, and the git add origin command
         # errors out; this allows the test to continue
         remote_origin_already_exists = 3
-        print(e)
-        if e.returncode != remote_origin_already_exists:
-            sys.exit(1)
+        if e.returncode == remote_origin_already_exists:
+            print(
+                "Consider setting TEST_OPENSEARCH_NOFETCH=true if you want to reuse the existing local OpenSearch repo"
+            )
+        else:
+            print(e)
+            sys.exit(1)
 
     # fetch the sha commit, version from info()
     print("Fetching opensearch repo...")
@@ -128,6 +132,7 @@ def run_all(argv: Any = None) -> None:
     codecov_xml = join(
         abspath(dirname(dirname(__file__))), "junit", "opensearch-py-codecov.xml"
     )
+
     argv = [
         "pytest",
         "--cov=opensearchpy",
@@ -137,6 +142,12 @@ def run_all(argv: Any = None) -> None:
         "-vv",
         "--cov-report=xml:%s" % codecov_xml,
     ]
+    if (
+        "OPENSEARCHPY_GEN_HTML_COV" in environ
+        and environ.get("OPENSEARCHPY_GEN_HTML_COV") == "true"
+    ):
+        codecov_html = join(abspath(dirname(dirname(__file__))), "junit", "html")
+        argv.append("--cov-report=html:%s" % codecov_html)
 
     secured = False
     if environ.get("OPENSEARCH_URL", "").startswith("https://"):
diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py
index d154e7ac..662cf146 100644
--- a/test_opensearchpy/test_async/test_connection.py
+++ b/test_opensearchpy/test_async/test_connection.py
@@ -341,7 +341,7 @@ async def test_failure_body_not_logged(self, logger: Any) -> None:
     async def test_surrogatepass_into_bytes(self) -> None:
         buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa"
         con = await self._get_mock_connection(response_body=buf)
-        status, headers, data = await con.perform_request("GET", "/")
+        _, _, data = await con.perform_request("GET", "/")
         assert u"你好\uda6a" == data  # fmt: skip
 
     @pytest.mark.parametrize("exception_cls", reraise_exceptions)  # type: ignore
@@ -394,7 +394,7 @@ def teardown_class(cls) -> None:
         cls.server.stop()
 
     async def httpserver(self, conn: Any, **kwargs: Any) -> Any:
-        status, headers, data = await conn.perform_request("GET", "/", **kwargs)
+        status, _, data = await conn.perform_request("GET", "/", **kwargs)
         data = json.loads(data)
         return (status, data)
diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py
index 29fbb5a3..2ea6afe9 100644
--- a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py
+++ b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py
@@ -72,7 +72,7 @@ async def bulk(self, *args: Any, **kwargs: Any) -> Any:
 class TestStreamingBulk(object):
     async def test_actions_remain_unchanged(self, async_client: Any) -> None:
         actions1 = [{"_id": 1}, {"_id": 2}]
-        async for ok, item in actions.async_streaming_bulk(
+        async for ok, _ in actions.async_streaming_bulk(
             async_client, actions1, index="test-index"
         ):
             assert ok
@@ -80,7 +80,7 @@ async def test_actions_remain_unchanged(self, async_client: Any) -> None:
 
     async def test_all_documents_get_inserted(self, async_client: Any) -> None:
         docs = [{"answer": x, "_id": x} for x in range(100)]
-        async for ok, item in actions.async_streaming_bulk(
+        async for ok, _ in actions.async_streaming_bulk(
             async_client, docs, index="test-index", refresh=True
         ):
             assert ok
@@ -100,7 +100,7 @@ def sync_gen() -> Any:
             for x in range(100):
                 yield {"answer": x, "_id": x}
 
-        async for ok, item in actions.async_streaming_bulk(
+        async for ok, _ in actions.async_streaming_bulk(
             async_client, async_gen(), index="test-index", refresh=True
         ):
             assert ok
@@ -114,7 +114,7 @@ def sync_gen() -> Any:
             index="test-index", body={"query": {"match_all": {}}}
         )
 
-        async for ok, item in actions.async_streaming_bulk(
+        async for ok, _ in actions.async_streaming_bulk(
             async_client, sync_gen(), index="test-index", refresh=True
         ):
             assert ok
@@ -137,7 +137,7 @@ async def test_all_errors_from_chunk_are_raised_on_failure(
         await async_client.cluster.health(wait_for_status="yellow")
 
         try:
-            async for ok, item in actions.async_streaming_bulk(
+            async for ok, _ in actions.async_streaming_bulk(
                 async_client, [{"a": "b"}, {"a": "c"}], index="i", raise_on_error=True
             ):
                 assert ok
@@ -154,7 +154,7 @@ async def test_different_op_types(self, async_client: Any) -> None:
             {"_op_type": "delete", "_index": "i", "_id": 45},
             {"_op_type": "update", "_index": "i", "_id": 42, "doc": {"answer": 42}},
         ]
-        async for ok, item in actions.async_streaming_bulk(async_client, docs):
+        async for ok, _ in actions.async_streaming_bulk(async_client, docs):
             assert ok
 
         assert not await async_client.exists(index="i", id=45)
diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py
index 90899d62..70861f0b 100644
--- a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py
+++ b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py
@@ -170,7 +170,7 @@ async def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None:
     assert r.hits.total.value == 1
     assert [(True, 1, False)] == r.facets.public
 
-    value, count, selected = r.facets.public[0]
+    value, _, _ = r.facets.public[0]
     assert value is True
diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py
index 360cef2f..325f29a0 100644
--- a/test_opensearchpy/test_connection/test_requests_http_connection.py
+++ b/test_opensearchpy/test_connection/test_requests_http_connection.py
@@ -73,7 +73,7 @@ def _get_request(self, connection: Any, *args: Any, **kwargs: Any) -> Any:
         if "body" in kwargs:
             kwargs["body"] = kwargs["body"].encode("utf-8")
 
-        status, headers, data = connection.perform_request(*args, **kwargs)
+        status, _, data = connection.perform_request(*args, **kwargs)
         self.assertEqual(200, status)
         self.assertEqual("{}", data)
@@ -278,7 +278,7 @@ def test_failed_request_logs_and_traces(self, logger: Any, tracer: Any) -> None:
     @patch("opensearchpy.connection.base.logger")
     def test_success_logs_and_traces(self, logger: Any, tracer: Any) -> None:
         con = self._get_mock_connection(response_body=b"""{"answer": "that's it!"}""")
-        status, headers, data = con.perform_request(
+        _, _, _ = con.perform_request(
             "GET",
             "/",
             {"param": 42},
@@ -430,7 +430,7 @@ def test_url_prefix(self, tracer: Any) -> None:
     def test_surrogatepass_into_bytes(self) -> None:
         buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa"
         con = self._get_mock_connection(response_body=buf)
-        status, headers, data = con.perform_request("GET", "/")
+        _, _, data = con.perform_request("GET", "/")
         self.assertEqual(u"你好\uda6a", data)  # fmt: skip
 
     def test_recursion_error_reraised(self) -> None:
@@ -543,17 +543,15 @@ def test_redirect_failure_when_allow_redirect_false(self) -> None:
     def test_redirect_success_when_allow_redirect_true(self) -> None:
         conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60)
         user_agent = conn._get_default_user_agent()
-        status, headers, data = conn.perform_request("GET", "/redirect")
+        status, _, data = conn.perform_request("GET", "/redirect")
         self.assertEqual(status, 200)
         data = json.loads(data)
-        self.assertEqual(
-            data["headers"],
-            {
-                "Host": "localhost:8090",
-                "Accept-Encoding": "identity",
-                "User-Agent": user_agent,
-            },
-        )
+        expected_headers = {
+            "Host": "localhost:8090",
+            "Accept-Encoding": "identity",
+            "User-Agent": user_agent,
+        }
+        self.assertEqual(data["headers"], expected_headers)
 
 
 class TestSignerWithFrozenCredentials(TestRequestsHttpConnection):
diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py
index 9da5d40b..ef9cbc73 100644
--- a/test_opensearchpy/test_connection/test_urllib3_http_connection.py
+++ b/test_opensearchpy/test_connection/test_urllib3_http_connection.py
@@ -384,7 +384,7 @@ def test_failure_body_not_logged(self, logger: Any) -> None:
     def test_surrogatepass_into_bytes(self) -> None:
         buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa"
         con = self._get_mock_connection(response_body=buf)
-        status, headers, data = con.perform_request("GET", "/")
+        _, _, data = con.perform_request("GET", "/")
         self.assertEqual(u"你好\uda6a", data)  # fmt: skip
 
     def test_recursion_error_reraised(self) -> None:
diff --git a/test_opensearchpy/test_helpers/test_actions.py b/test_opensearchpy/test_helpers/test_actions.py
index 68ce1027..c0d07138 100644
--- a/test_opensearchpy/test_helpers/test_actions.py
+++ b/test_opensearchpy/test_helpers/test_actions.py
@@ -265,7 +265,7 @@ def test_chunks_are_chopped_by_byte_size_properly(self) -> None:
             )
         )
         self.assertEqual(25, len(chunks))
-        for chunk_data, chunk_actions in chunks:
+        for _, chunk_actions in chunks:
             chunk = u"".join(chunk_actions)  # fmt: skip
             chunk = chunk if isinstance(chunk, str) else chunk.encode("utf-8")
             self.assertLessEqual(len(chunk), max_byte_size)
diff --git a/test_opensearchpy/test_helpers/test_faceted_search.py b/test_opensearchpy/test_helpers/test_faceted_search.py
index d1874541..39227bf0 100644
--- a/test_opensearchpy/test_helpers/test_faceted_search.py
+++ b/test_opensearchpy/test_helpers/test_faceted_search.py
@@ -94,6 +94,7 @@ def test_query_is_created_properly_with_sort_tuple() -> None:
         "highlight": {"fields": {"body": {}, "title": {}}},
         "sort": ["category", {"title": {"order": "desc"}}],
     } == s.to_dict()
+    assert bs.facets["category"].get_value_filter(None) is None
 
 
 def test_filter_is_applied_to_search_but_not_relevant_facet() -> None:
@@ -117,9 +118,10 @@ def test_filter_is_applied_to_search_but_not_relevant_facet() -> None:
         },
         "highlight": {"fields": {"body": {}, "title": {}}},
     } == s.to_dict()
+    assert bs.facets["category"].get_value_filter(None) is None
 
 
-def test_filters_are_applied_to_search_ant_relevant_facets() -> None:
+def test_filters_are_applied_to_search_and_relevant_facets() -> None:
     bs = BlogSearch(
         "python search",
         filters={"category": "opensearch", "tags": ["python", "django"]},
diff --git a/test_opensearchpy/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_server/test_helpers/test_actions.py
index 17b60d6d..4d4bd893 100644
--- a/test_opensearchpy/test_server/test_helpers/test_actions.py
+++ b/test_opensearchpy/test_server/test_helpers/test_actions.py
@@ -59,15 +59,13 @@ def bulk(self, *args: Any, **kwargs: Any) -> Any:
 class TestStreamingBulk(OpenSearchTestCase):
     def test_actions_remain_unchanged(self) -> None:
         actions = [{"_id": 1}, {"_id": 2}]
-        for ok, item in helpers.streaming_bulk(
-            self.client, actions, index="test-index"
-        ):
+        for ok, _ in helpers.streaming_bulk(self.client, actions, index="test-index"):
             self.assertTrue(ok)
         self.assertEqual([{"_id": 1}, {"_id": 2}], actions)
 
     def test_all_documents_get_inserted(self) -> None:
         docs = [{"answer": x, "_id": x} for x in range(100)]
-        for ok, item in helpers.streaming_bulk(
+        for ok, _ in helpers.streaming_bulk(
             self.client, docs, index="test-index", refresh=True
         ):
             self.assertTrue(ok)
@@ -88,7 +86,7 @@ def test_all_errors_from_chunk_are_raised_on_failure(self) -> None:
         self.client.cluster.health(wait_for_status="yellow")
 
         try:
-            for ok, item in helpers.streaming_bulk(
+            for ok, _ in helpers.streaming_bulk(
                 self.client, [{"a": "b"}, {"a": "c"}], index="i", raise_on_error=True
             ):
                 self.assertTrue(ok)
@@ -112,7 +110,7 @@ def test_different_op_types(self) -> Any:
                 "doc": {"answer": 42},
             },
         ]
-        for ok, item in helpers.streaming_bulk(self.client, docs):
+        for ok, _ in helpers.streaming_bulk(self.client, docs):
             self.assertTrue(ok)
 
         self.assertFalse(self.client.exists(index="i", id=45))
diff --git a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py
index 54e49c9d..a9eebc94 100644
--- a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py
+++ b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py
@@ -176,6 +176,8 @@ def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None:
     assert [(True, 1, False)] == r.facets.public
     value, count, selected = r.facets.public[0]
     assert value is True
+    assert count == 1
+    assert selected is False
 
 
 def test_empty_search_finds_everything(
diff --git a/test_opensearchpy/utils.py b/test_opensearchpy/utils.py
index d4469600..88be036b 100644
--- a/test_opensearchpy/utils.py
+++ b/test_opensearchpy/utils.py
@@ -145,7 +145,7 @@ def wipe_node_shutdown_metadata(client: Any) -> None:
 
 def wipe_tasks(client: Any) -> None:
     tasks = client.tasks.list()
-    for node_name, node in tasks.get("node", {}).items():
+    for _, node in tasks.get("node", {}).items():
        for task_id in node.get("tasks", ()):
            client.tasks.cancel(task_id=task_id, wait_for_completion=True)
diff --git a/utils/generate_api.py b/utils/generate_api.py
index b10a9056..475782e3 100644
--- a/utils/generate_api.py
+++ b/utils/generate_api.py
@@ -340,7 +340,7 @@ def all_parts(self) -> Dict[str, str]:
             if k in parts:
                 parts[sub] = parts.pop(k)
 
-        dynamic, components = self.url_parts
+        _, components = self.url_parts
 
         def ind(item: Any) -> Any:
             try: