Adding support for redisearch #1640

Merged 28 commits on Oct 25, 2021

Commits (28)
a78dbeb
tox integrations with invoke and docker
chayim Oct 20, 2021
f4453e7
github action to wrap invoke
chayim Oct 20, 2021
457dc04
syntax error on action
chayim Oct 20, 2021
c90bd14
s/action/actions
chayim Oct 20, 2021
3d1a007
Adding a test for packaging
chayim Oct 20, 2021
0f3e2c0
Reducing dependencies for package test
chayim Oct 20, 2021
008d532
Merge remote-tracking branch 'fork/ck-newtox'
chayim Oct 20, 2021
ee164ec
json - tests running in the new tox environment
chayim Oct 20, 2021
b39ba5e
removed search wrapper
chayim Oct 20, 2021
d0e7c21
merged master
chayim Oct 21, 2021
1c73010
Merge remote-tracking branch 'origin/master' into ck-jsonsearch
chayim Oct 21, 2021
da49c7b
completing json integration, and unit tests.
chayim Oct 21, 2021
89d1778
tox port
chayim Oct 21, 2021
ad0f5a5
Merge remote-tracking branch 'origin/master' into ck-jsonsearch
chayim Oct 21, 2021
179a800
merge back master - trying to figure out why in github this fails
chayim Oct 21, 2021
dbb8678
redismod port change
chayim Oct 21, 2021
03187d8
unit test fix from bad merge
chayim Oct 24, 2021
18efe66
Starting point on search integration
chayim Oct 24, 2021
d653735
passing search tests
chayim Oct 24, 2021
942857a
search integration and unit tests
chayim Oct 25, 2021
c588717
Merge remote-tracking branch 'origin/master' into ck-jsonsearch
chayim Oct 25, 2021
d20fa72
json merged from master, with the docker fix
chayim Oct 25, 2021
a18032d
Merge remote-tracking branch 'origin/ck-jsonsearch' into ck-searchonly
chayim Oct 25, 2021
74ec453
fixing redismod client
chayim Oct 25, 2021
bb97458
adding longer search for github actions
chayim Oct 25, 2021
45aecc3
flake8
chayim Oct 25, 2021
6b986f7
working around github action speed
chayim Oct 25, 2021
b772080
master merge
chayim Oct 25, 2021
9 changes: 9 additions & 0 deletions redis/commands/redismodules.py
@@ -15,3 +15,12 @@ def json(self, encoder=JSONEncoder(), decoder=JSONDecoder()):
        from .json import JSON
        jj = JSON(client=self, encoder=encoder, decoder=decoder)
        return jj

    def ft(self, index_name="idx"):
        """Access the search namespace, providing support for redis search."""
        if 'search' not in self.loaded_modules:
            raise ModuleError("search is not loaded in the redis instance.")

        from .search import Search
        s = Search(client=self, index_name=index_name)
        return s
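
For illustration, a minimal usage sketch of the new ft() entry point, assuming a local Redis server with the search module loaded and a hypothetical index name myIdx:

import redis

r = redis.Redis(host="localhost", port=6379)
# ft() returns a Search client bound to the given index name; it raises
# ModuleError if the search module is not loaded on the server.
search = r.ft(index_name="myIdx")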
96 changes: 96 additions & 0 deletions redis/commands/search/__init__.py
@@ -0,0 +1,96 @@
from .commands import SearchCommands


class Search(SearchCommands):
    """
    Create a client for talking to search.
    It abstracts the API of the module and lets you just use the engine.
    """

    class BatchIndexer(object):
        """
        A batch indexer allows you to automatically batch
        document indexing in pipelines, flushing it every N documents.
        """

        def __init__(self, client, chunk_size=1000):
            self.client = client
            self.execute_command = client.execute_command
            self.pipeline = client.pipeline(transaction=False, shard_hint=None)
            self.total = 0
            self.chunk_size = chunk_size
            self.current_chunk = 0

        def __del__(self):
            if self.current_chunk:
                self.commit()

        def add_document(
            self,
            doc_id,
            nosave=False,
            score=1.0,
            payload=None,
            replace=False,
            partial=False,
            no_create=False,
            **fields
        ):
            """
            Add a document to the batch query
            """
            self.client._add_document(
                doc_id,
                conn=self.pipeline,
                nosave=nosave,
                score=score,
                payload=payload,
                replace=replace,
                partial=partial,
                no_create=no_create,
                **fields
            )
            self.current_chunk += 1
            self.total += 1
            if self.current_chunk >= self.chunk_size:
                self.commit()

        def add_document_hash(
            self,
            doc_id,
            score=1.0,
            replace=False,
        ):
            """
            Add a hash to the batch query
            """
            self.client._add_document_hash(
                doc_id,
                conn=self.pipeline,
                score=score,
                replace=replace,
            )
            self.current_chunk += 1
            self.total += 1
            if self.current_chunk >= self.chunk_size:
                self.commit()

        def commit(self):
            """
            Manually commit and flush the batch indexing query
            """
            self.pipeline.execute()
            self.current_chunk = 0

    def __init__(self, client, index_name="idx"):
        """
        Create a new Client for the given index_name.
        The default name is `idx`.

        An existing redis `client` connection is employed to execute
        all commands.
        """
        self.client = client
        self.index_name = index_name
        self.execute_command = client.execute_command
        self.pipeline = client.pipeline
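
For context, a hedged sketch of how BatchIndexer might be driven, assuming a Search client obtained via ft() as shown in the previous file; the document ids and field names here are made up:

# chunk_size controls how many documents are buffered in the pipeline
# before it is executed automatically.
indexer = Search.BatchIndexer(search, chunk_size=500)
for i in range(1200):
    indexer.add_document("doc%d" % i, title="Document %d" % i, body="...")
# flush the remaining 200 documents that never filled a complete chunk
indexer.commit()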
10 changes: 10 additions & 0 deletions redis/commands/search/_util.py
@@ -0,0 +1,10 @@
import six


def to_string(s):
    if isinstance(s, six.string_types):
        return s
    elif isinstance(s, six.binary_type):
        return s.decode("utf-8", "ignore")
    else:
        return s  # Not a string we care about
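
A small, assumed example of the helper's behaviour, given that the search response parsers receive a mix of str and bytes replies:

assert to_string("hello") == "hello"   # str passes through untouched
assert to_string(b"hello") == "hello"  # bytes are decoded as UTF-8
assert to_string(42) == 42             # non-strings are returned unchanged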