test: fix empty result
Signed-off-by: zhuwenxing <[email protected]>
zhuwenxing committed Oct 28, 2024
1 parent 47f8087 commit fe13e8d
Showing 1 changed file with 8 additions and 7 deletions.
tests/python_client/testcases/test_full_text_search.py (8 additions, 7 deletions)
@@ -2238,9 +2238,10 @@ def test_full_text_search_default(
         collection_w.create_index("text", {"index_type": "INVERTED"})
         collection_w.load()
         limit = 100
-        search_data = [fake.text().lower() + " " + random.choice(tokens) for _ in range(nq)]
+        token = random.choice(tokens)
+        search_data = [fake.text().lower() + " " + token for _ in range(nq)]
         if expr == "text_match":
-            filter = f"TextMatch(text, '{tokens[0]}')"
+            filter = f"TextMatch(text, '{token}')"
             res, _ = collection_w.query(
                 expr=filter,
             )
@@ -2297,7 +2298,7 @@ def test_full_text_search_default(
                 result_text = r.text
                 # verify search result satisfies the filter
                 if expr == "text_match":
-                    assert tokens[0] in result_text
+                    assert token in result_text
                 if expr == "id_range":
                     assert _id < data_size // 2
                 # verify search result has overlap with search text
@@ -2433,9 +2434,10 @@ def test_full_text_search_with_jieba_tokenizer(
         collection_w.create_index("text", {"index_type": "INVERTED"})
         collection_w.load()
         limit = 100
-        search_data = [fake.text().lower() + " " + random.choice(tokens) for _ in range(nq)]
+        token = random.choice(tokens)
+        search_data = [fake.text().lower() + " " + token for _ in range(nq)]
         if expr == "text_match":
-            filter = f"TextMatch(text, '{tokens[0]}')"
+            filter = f"TextMatch(text, '{token}')"
             res, _ = collection_w.query(
                 expr=filter,
             )
@@ -2492,7 +2494,7 @@ def test_full_text_search_with_jieba_tokenizer(
                 result_text = r.text
                 # verify search result satisfies the filter
                 if expr == "text_match":
-                    assert tokens[0] in result_text
+                    assert token in result_text
                 if expr == "id_range":
                     assert _id < data_size // 2
                 # verify search result has overlap with search text
@@ -2501,7 +2503,6 @@ def test_full_text_search_with_jieba_tokenizer(
                 assert len(
                     overlap) > 0, f"query text: {search_text}, \ntext: {result_text} \n overlap: {overlap} \n word freq a: {word_freq_a} \n word freq b: {word_freq_b}\n result: {r}"
 
-
     @pytest.mark.tags(CaseLabel.L1)
     @pytest.mark.parametrize("nq", [1])
     @pytest.mark.parametrize("empty_percent", [0])
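The diff makes one change in both test cases: the random token is drawn once and reused for the search strings, the TextMatch filter, and the assertion. Below is a minimal sketch of that pattern; the tokens list, query strings, and names are stand-ins rather than the test's real data, and the stated reason (the filter token diverging from the tokens embedded in the queries, which can leave the filtered result empty) is inferred from the commit title.

# A minimal sketch (not part of the commit) of why sharing one token matters.
# The `tokens` and `nq` names mirror the test, but the values are hypothetical.
import random

tokens = ["milvus", "vector", "inverted"]  # hypothetical tokens from the corpus
nq = 3

# Before: each query string appended its own random token while the filter always
# used tokens[0], so a query could carry a token the filter never selects.
search_data_old = ["some text " + random.choice(tokens) for _ in range(nq)]
filter_old = f"TextMatch(text, '{tokens[0]}')"

# After: pick the token once and reuse it for both the query strings and the
# filter, so the filtered documents and the query text share the same token.
token = random.choice(tokens)
search_data_new = ["some text " + token for _ in range(nq)]
filter_new = f"TextMatch(text, '{token}')"

print(filter_old, search_data_old)
print(filter_new, search_data_new)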
