Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Assignment from no return #658

Merged
merged 7 commits into from
Jan 25, 2024
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,9 @@ enable=line-too-long,
missing-function-docstring,
missing-param-doc,
differing-param-doc,
unnecessary-dunder-call
unnecessary-dunder-call,
assignment-from-no-return,
unused-variable
max-line-length=240
good-names-rgxs=^[_a-z][_a-z0-9]?$

3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)

## [Unreleased]
### Added
- Added pylint `assignment-from-no-return` and `unused-variable` ([#658](https://github.com/opensearch-project/opensearch-py/pull/658))
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please change PR number 658

- Added pylint `unnecessary-dunder-calls` (([#655](https://github.com/opensearch-project/opensearch-py/pull/655))
- Changed to use .pylintrc files in root and any directory with override requirements (([#654](https://github.com/opensearch-project/opensearch-py/pull/654))
- Added pylint `unspecified-encoding` and `missing-function-docstring` and ignored opensearchpy for lints (([#643](https://github.com/opensearch-project/opensearch-py/pull/643)))
- Added pylint `line-too-long` and `invalid-name` ([#590](https://github.com/opensearch-project/opensearch-py/pull/590))
- Added pylint `pointless-statement` ([#611](https://github.com/opensearch-project/opensearch-py/pull/611))
Expand Down
4 changes: 2 additions & 2 deletions benchmarks/bench_info_sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
def get_info(client: Any, request_count: int) -> float:
"""get info from client"""
total_time: float = 0
for request in range(request_count):
for _ in range(request_count):
start = time.time() * 1000
client.info()
total_time += time.time() * 1000 - start
Expand All @@ -49,7 +49,7 @@ def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1) -
root.addHandler(handler)

clients = []
for i in range(client_count):
for _ in range(client_count):
clients.append(
OpenSearch(
hosts=[{"host": host, "port": port}],
Expand Down
4 changes: 2 additions & 2 deletions benchmarks/bench_sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
def index_records(client: Any, index_name: str, item_count: int) -> Any:
"""bulk index item_count records into index_name"""
total_time = 0
for iteration in range(10):
for _ in range(10):
data: Any = []
for item in range(item_count):
data.append(
Expand Down Expand Up @@ -68,7 +68,7 @@ def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> N
root.addHandler(handler)

clients = []
for i in range(client_count):
for _ in range(client_count):
clients.append(
OpenSearch(
hosts=[{"host": host, "port": port}],
Expand Down
4 changes: 3 additions & 1 deletion opensearchpy/.pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ enable=line-too-long,
invalid-name,
pointless-statement,
unspecified-encoding,
unnecessary-dunder-call
unnecessary-dunder-call,
assignment-from-no-return,
unused-variable
max-line-length=240
good-names-rgxs=^[_a-z][_a-z0-9]?$
6 changes: 1 addition & 5 deletions opensearchpy/helpers/faceted_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from datetime import datetime, timedelta
from typing import Any, Optional

Expand Down Expand Up @@ -86,10 +85,7 @@ def add_filter(self, filter_values: Any) -> Any:
return f

def get_value_filter(self, filter_value: Any) -> Any:
"""
Construct a filter for an individual value
"""
pass
return None

def is_filtered(self, key: Any, filter_values: Any) -> bool:
"""
Expand Down
4 changes: 2 additions & 2 deletions samples/knn/knn_async_basics.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ async def main() -> None:
vectors = []
for i in range(10):
vec = []
for j in range(dimensions):
for _ in range(dimensions):
vec.append(round(random.uniform(0, 1), 2))

vectors.append(
Expand All @@ -74,7 +74,7 @@ async def main() -> None:

# search
vec = []
for j in range(dimensions):
for _ in range(dimensions):
vec.append(round(random.uniform(0, 1), 2))
print(f"Searching for {vec} ...")

Expand Down
9 changes: 3 additions & 6 deletions samples/knn/knn_basics.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ def main() -> None:
vectors = []
for i in range(10):
vec = []
for j in range(dimensions):
for _ in range(dimensions):
vec.append(round(random.uniform(0, 1), 2))

vectors.append(
Expand All @@ -72,15 +72,12 @@ def main() -> None:
client.indices.refresh(index=index_name)

# search
vec = []
for j in range(dimensions):
vec.append(round(random.uniform(0, 1), 2))
vec = [round(random.uniform(0, 1), 2) for _ in range(dimensions)]
print(f"Searching for {vec} ...")

search_query = {"query": {"knn": {"values": {"vector": vec, "k": 3}}}}
results = client.search(index=index_name, body=search_query)
for hit in results["hits"]["hits"]:
print(hit)
(print(hit) for hit in results["hits"]["hits"])

# delete index
client.indices.delete(index=index_name)
Expand Down
4 changes: 2 additions & 2 deletions samples/knn/knn_boolean_filter.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def main() -> None:
genres = ["fiction", "drama", "romance"]
for i in range(3000):
vec = []
for j in range(dimensions):
for _ in range(dimensions):
vec.append(round(random.uniform(0, 1), 2))

vectors.append(
Expand All @@ -76,7 +76,7 @@ def main() -> None:
# search
genre = random.choice(genres)
vec = []
for j in range(dimensions):
for _ in range(dimensions):
vec.append(round(random.uniform(0, 1), 2))
print(f"Searching for {vec} with the '{genre}' genre ...")

Expand Down
4 changes: 3 additions & 1 deletion test_opensearchpy/.pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ enable=line-too-long,
unspecified-encoding,
missing-param-doc,
differing-param-doc,
unnecessary-dunder-call
unnecessary-dunder-call,
assignment-from-no-return,
unused-variable
max-line-length=240
good-names-rgxs=^[_a-z][_a-z0-9]?$
17 changes: 14 additions & 3 deletions test_opensearchpy/run_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,9 +94,13 @@ def fetch_opensearch_repo() -> None:
# if the run is interrupted from a previous run, it doesn't clean up, and the git add origin command
# errors out; this allows the test to continue
remote_origin_already_exists = 3
print(e)
if e.returncode != remote_origin_already_exists:
sys.exit(1)
if e.returncode == remote_origin_already_exists:
print(
"Consider setting TEST_OPENSEARCH_NOFETCH=true if you want to reuse the existing local OpenSearch repo"
)
else:
print(e)
sys.exit(1)
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

should this sys.exit(1) be moved into else?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

My intent here was to make it clear why the exit was happening. I didn't want to change the previous behavior especially after I found the TEST_OPENSEARCH_NOFETCH variable.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@macohen, I apologize if I am causing any confusion, but I believe there's a change in the behavior for e.returncode == remote_origin_already_exists.

It seems that there shouldn't be a sys.exit(1) when e.returncode == remote_origin_already_exists. Please correct me if I'm mistaken.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Sorry for the poor explanation on my end. Prior to my change, when the git repository already had a remote origin added, the whole process exited. I was considering just ignoring that state, but then found the TEST_OPENSEARCH_NOFETCH variable which can be set when you want that state to be ignored. I figured it would be better to maintain the behavior of the script in that case and also tell the user that TEST_OPENSEARCH_NOFETCH could help if exiting was not desired. I hope that helps...


# fetch the sha commit, version from info()
print("Fetching opensearch repo...")
Expand Down Expand Up @@ -128,6 +132,7 @@ def run_all(argv: Any = None) -> None:
codecov_xml = join(
abspath(dirname(dirname(__file__))), "junit", "opensearch-py-codecov.xml"
)

argv = [
"pytest",
"--cov=opensearchpy",
Expand All @@ -137,6 +142,12 @@ def run_all(argv: Any = None) -> None:
"-vv",
"--cov-report=xml:%s" % codecov_xml,
]
if (
"OPENSEARCHPY_GEN_HTML_COV" in environ
and environ.get("OPENSEARCHPY_GEN_HTML_COV") == "true"
):
codecov_html = join(abspath(dirname(dirname(__file__))), "junit", "html")
argv.append("--cov-report=html:%s" % codecov_html)

secured = False
if environ.get("OPENSEARCH_URL", "").startswith("https://"):
Expand Down
4 changes: 2 additions & 2 deletions test_opensearchpy/test_async/test_connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -341,7 +341,7 @@ async def test_failure_body_not_logged(self, logger: Any) -> None:
async def test_surrogatepass_into_bytes(self) -> None:
buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa"
con = await self._get_mock_connection(response_body=buf)
status, headers, data = await con.perform_request("GET", "/")
_, _, data = await con.perform_request("GET", "/")
assert u"你好\uda6a" == data # fmt: skip

@pytest.mark.parametrize("exception_cls", reraise_exceptions) # type: ignore
Expand Down Expand Up @@ -394,7 +394,7 @@ def teardown_class(cls) -> None:
cls.server.stop()

async def httpserver(self, conn: Any, **kwargs: Any) -> Any:
status, headers, data = await conn.perform_request("GET", "/", **kwargs)
status, _, data = await conn.perform_request("GET", "/", **kwargs)
data = json.loads(data)
return (status, data)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,15 +72,15 @@ async def bulk(self, *args: Any, **kwargs: Any) -> Any:
class TestStreamingBulk(object):
async def test_actions_remain_unchanged(self, async_client: Any) -> None:
actions1 = [{"_id": 1}, {"_id": 2}]
async for ok, item in actions.async_streaming_bulk(
async for ok, _ in actions.async_streaming_bulk(
async_client, actions1, index="test-index"
):
assert ok
assert [{"_id": 1}, {"_id": 2}] == actions1

async def test_all_documents_get_inserted(self, async_client: Any) -> None:
docs = [{"answer": x, "_id": x} for x in range(100)]
async for ok, item in actions.async_streaming_bulk(
async for ok, _ in actions.async_streaming_bulk(
async_client, docs, index="test-index", refresh=True
):
assert ok
Expand All @@ -100,7 +100,7 @@ def sync_gen() -> Any:
for x in range(100):
yield {"answer": x, "_id": x}

async for ok, item in actions.async_streaming_bulk(
async for ok, _ in actions.async_streaming_bulk(
async_client, async_gen(), index="test-index", refresh=True
):
assert ok
Expand All @@ -114,7 +114,7 @@ def sync_gen() -> Any:
index="test-index", body={"query": {"match_all": {}}}
)

async for ok, item in actions.async_streaming_bulk(
async for ok, _ in actions.async_streaming_bulk(
async_client, sync_gen(), index="test-index", refresh=True
):
assert ok
Expand All @@ -137,7 +137,7 @@ async def test_all_errors_from_chunk_are_raised_on_failure(
await async_client.cluster.health(wait_for_status="yellow")

try:
async for ok, item in actions.async_streaming_bulk(
async for ok, _ in actions.async_streaming_bulk(
async_client, [{"a": "b"}, {"a": "c"}], index="i", raise_on_error=True
):
assert ok
Expand All @@ -154,7 +154,7 @@ async def test_different_op_types(self, async_client: Any) -> None:
{"_op_type": "delete", "_index": "i", "_id": 45},
{"_op_type": "update", "_index": "i", "_id": 42, "doc": {"answer": 42}},
]
async for ok, item in actions.async_streaming_bulk(async_client, docs):
async for ok, _ in actions.async_streaming_bulk(async_client, docs):
assert ok

assert not await async_client.exists(index="i", id=45)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,7 @@ async def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None:

assert r.hits.total.value == 1
assert [(True, 1, False)] == r.facets.public
value, count, selected = r.facets.public[0]
value, _, _ = r.facets.public[0]
assert value is True


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ def _get_request(self, connection: Any, *args: Any, **kwargs: Any) -> Any:
if "body" in kwargs:
kwargs["body"] = kwargs["body"].encode("utf-8")

status, headers, data = connection.perform_request(*args, **kwargs)
status, _, data = connection.perform_request(*args, **kwargs)
self.assertEqual(200, status)
self.assertEqual("{}", data)

Expand Down Expand Up @@ -278,7 +278,7 @@ def test_failed_request_logs_and_traces(self, logger: Any, tracer: Any) -> None:
@patch("opensearchpy.connection.base.logger")
def test_success_logs_and_traces(self, logger: Any, tracer: Any) -> None:
con = self._get_mock_connection(response_body=b"""{"answer": "that's it!"}""")
status, headers, data = con.perform_request(
_, _, _ = con.perform_request(
"GET",
"/",
{"param": 42},
Expand Down Expand Up @@ -430,7 +430,7 @@ def test_url_prefix(self, tracer: Any) -> None:
def test_surrogatepass_into_bytes(self) -> None:
buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa"
con = self._get_mock_connection(response_body=buf)
status, headers, data = con.perform_request("GET", "/")
_, _, data = con.perform_request("GET", "/")
self.assertEqual(u"你好\uda6a", data) # fmt: skip

def test_recursion_error_reraised(self) -> None:
Expand Down Expand Up @@ -546,14 +546,12 @@ def test_redirect_success_when_allow_redirect_true(self) -> None:
status, headers, data = conn.perform_request("GET", "/redirect")
self.assertEqual(status, 200)
data = json.loads(data)
self.assertEqual(
data["headers"],
{
"Host": "localhost:8090",
"Accept-Encoding": "identity",
"User-Agent": user_agent,
},
)
expected_headers = {
"Host": "localhost:8090",
"Accept-Encoding": "identity",
"User-Agent": user_agent,
}
self.assertEqual(data["headers"], expected_headers)


class TestSignerWithFrozenCredentials(TestRequestsHttpConnection):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -384,7 +384,7 @@ def test_failure_body_not_logged(self, logger: Any) -> None:
def test_surrogatepass_into_bytes(self) -> None:
buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa"
con = self._get_mock_connection(response_body=buf)
status, headers, data = con.perform_request("GET", "/")
_, _, data = con.perform_request("GET", "/")
self.assertEqual(u"你好\uda6a", data) # fmt: skip

def test_recursion_error_reraised(self) -> None:
Expand Down
2 changes: 1 addition & 1 deletion test_opensearchpy/test_helpers/test_actions.py
Original file line number Diff line number Diff line change
Expand Up @@ -265,7 +265,7 @@ def test_chunks_are_chopped_by_byte_size_properly(self) -> None:
)
)
self.assertEqual(25, len(chunks))
for chunk_data, chunk_actions in chunks:
for _, chunk_actions in chunks:
chunk = u"".join(chunk_actions) # fmt: skip
chunk = chunk if isinstance(chunk, str) else chunk.encode("utf-8")
self.assertLessEqual(len(chunk), max_byte_size)
Expand Down
4 changes: 3 additions & 1 deletion test_opensearchpy/test_helpers/test_faceted_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,7 @@ def test_query_is_created_properly_with_sort_tuple() -> None:
"highlight": {"fields": {"body": {}, "title": {}}},
"sort": ["category", {"title": {"order": "desc"}}],
} == s.to_dict()
assert bs.facets["category"].get_value_filter(None) is None


def test_filter_is_applied_to_search_but_not_relevant_facet() -> None:
Expand All @@ -117,9 +118,10 @@ def test_filter_is_applied_to_search_but_not_relevant_facet() -> None:
},
"highlight": {"fields": {"body": {}, "title": {}}},
} == s.to_dict()
assert bs.facets["category"].get_value_filter(None) is None


def test_filters_are_applied_to_search_ant_relevant_facets() -> None:
def test_filters_are_applied_to_search_and_relevant_facets() -> None:
bs = BlogSearch(
"python search",
filters={"category": "opensearch", "tags": ["python", "django"]},
Expand Down
Loading
Loading