diff --git a/chromadb/proto/chroma_pb2.py b/chromadb/proto/chroma_pb2.py index 48b64144192..d54e7c6e22d 100644 --- a/chromadb/proto/chroma_pb2.py +++ b/chromadb/proto/chroma_pb2.py @@ -13,7 +13,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x63hromadb/proto/chroma.proto\x12\x06\x63hroma\"&\n\x06Status\x12\x0e\n\x06reason\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\x05\"U\n\x06Vector\x12\x11\n\tdimension\x18\x01 \x01(\x05\x12\x0e\n\x06vector\x18\x02 \x01(\x0c\x12(\n\x08\x65ncoding\x18\x03 \x01(\x0e\x32\x16.chroma.ScalarEncoding\"\xca\x01\n\x07Segment\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12#\n\x05scope\x18\x03 \x01(\x0e\x32\x14.chroma.SegmentScope\x12\x12\n\x05topic\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncollection\x18\x05 \x01(\tH\x01\x88\x01\x01\x12-\n\x08metadata\x18\x06 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x02\x88\x01\x01\x42\x08\n\x06_topicB\r\n\x0b_collectionB\x0b\n\t_metadata\"\xb9\x01\n\nCollection\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05topic\x18\x03 \x01(\t\x12-\n\x08metadata\x18\x04 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x00\x88\x01\x01\x12\x16\n\tdimension\x18\x05 \x01(\x05H\x01\x88\x01\x01\x12\x0e\n\x06tenant\x18\x06 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x07 \x01(\tB\x0b\n\t_metadataB\x0c\n\n_dimension\"4\n\x08\x44\x61tabase\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06tenant\x18\x03 \x01(\t\"\x16\n\x06Tenant\x12\x0c\n\x04name\x18\x01 \x01(\t\"b\n\x13UpdateMetadataValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x13\n\tint_value\x18\x02 \x01(\x03H\x00\x12\x15\n\x0b\x66loat_value\x18\x03 \x01(\x01H\x00\x42\x07\n\x05value\"\x96\x01\n\x0eUpdateMetadata\x12\x36\n\x08metadata\x18\x01 \x03(\x0b\x32$.chroma.UpdateMetadata.MetadataEntry\x1aL\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0b\x32\x1b.chroma.UpdateMetadataValue:\x02\x38\x01\"\xcc\x01\n\x15SubmitEmbeddingRecord\x12\n\n\x02id\x18\x01 \x01(\t\x12#\n\x06vector\x18\x02 \x01(\x0b\x32\x0e.chroma.VectorH\x00\x88\x01\x01\x12-\n\x08metadata\x18\x03 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x01\x88\x01\x01\x12$\n\toperation\x18\x04 \x01(\x0e\x32\x11.chroma.Operation\x12\x15\n\rcollection_id\x18\x05 \x01(\tB\t\n\x07_vectorB\x0b\n\t_metadata\"S\n\x15VectorEmbeddingRecord\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06seq_id\x18\x02 \x01(\x0c\x12\x1e\n\x06vector\x18\x03 \x01(\x0b\x32\x0e.chroma.Vector\"q\n\x11VectorQueryResult\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06seq_id\x18\x02 \x01(\x0c\x12\x10\n\x08\x64istance\x18\x03 \x01(\x02\x12#\n\x06vector\x18\x04 \x01(\x0b\x32\x0e.chroma.VectorH\x00\x88\x01\x01\x42\t\n\x07_vector\"@\n\x12VectorQueryResults\x12*\n\x07results\x18\x01 \x03(\x0b\x32\x19.chroma.VectorQueryResult\"4\n\x11GetVectorsRequest\x12\x0b\n\x03ids\x18\x01 \x03(\t\x12\x12\n\nsegment_id\x18\x02 \x01(\t\"D\n\x12GetVectorsResponse\x12.\n\x07records\x18\x01 \x03(\x0b\x32\x1d.chroma.VectorEmbeddingRecord\"\x86\x01\n\x13QueryVectorsRequest\x12\x1f\n\x07vectors\x18\x01 \x03(\x0b\x32\x0e.chroma.Vector\x12\t\n\x01k\x18\x02 \x01(\x05\x12\x13\n\x0b\x61llowed_ids\x18\x03 \x03(\t\x12\x1a\n\x12include_embeddings\x18\x04 \x01(\x08\x12\x12\n\nsegment_id\x18\x05 \x01(\t\"C\n\x14QueryVectorsResponse\x12+\n\x07results\x18\x01 
\x03(\x0b\x32\x1a.chroma.VectorQueryResults*8\n\tOperation\x12\x07\n\x03\x41\x44\x44\x10\x00\x12\n\n\x06UPDATE\x10\x01\x12\n\n\x06UPSERT\x10\x02\x12\n\n\x06\x44\x45LETE\x10\x03*(\n\x0eScalarEncoding\x12\x0b\n\x07\x46LOAT32\x10\x00\x12\t\n\x05INT32\x10\x01*(\n\x0cSegmentScope\x12\n\n\x06VECTOR\x10\x00\x12\x0c\n\x08METADATA\x10\x01\x32\xa2\x01\n\x0cVectorReader\x12\x45\n\nGetVectors\x12\x19.chroma.GetVectorsRequest\x1a\x1a.chroma.GetVectorsResponse\"\x00\x12K\n\x0cQueryVectors\x12\x1b.chroma.QueryVectorsRequest\x1a\x1c.chroma.QueryVectorsResponse\"\x00\x42:Z8github.com/chroma-core/chroma/go/pkg/proto/coordinatorpbb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x63hromadb/proto/chroma.proto\x12\x06\x63hroma\"&\n\x06Status\x12\x0e\n\x06reason\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\x05\"U\n\x06Vector\x12\x11\n\tdimension\x18\x01 \x01(\x05\x12\x0e\n\x06vector\x18\x02 \x01(\x0c\x12(\n\x08\x65ncoding\x18\x03 \x01(\x0e\x32\x16.chroma.ScalarEncoding\"\x1a\n\tFilePaths\x12\r\n\x05paths\x18\x01 \x03(\t\"\xc3\x02\n\x07Segment\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12#\n\x05scope\x18\x03 \x01(\x0e\x32\x14.chroma.SegmentScope\x12\x12\n\x05topic\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x17\n\ncollection\x18\x05 \x01(\tH\x01\x88\x01\x01\x12-\n\x08metadata\x18\x06 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x02\x88\x01\x01\x12\x32\n\nfile_paths\x18\x07 \x03(\x0b\x32\x1e.chroma.Segment.FilePathsEntry\x1a\x43\n\x0e\x46ilePathsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 \x01(\x0b\x32\x11.chroma.FilePaths:\x02\x38\x01\x42\x08\n\x06_topicB\r\n\x0b_collectionB\x0b\n\t_metadata\"\xdf\x01\n\nCollection\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05topic\x18\x03 \x01(\t\x12-\n\x08metadata\x18\x04 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x00\x88\x01\x01\x12\x16\n\tdimension\x18\x05 \x01(\x05H\x01\x88\x01\x01\x12\x0e\n\x06tenant\x18\x06 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x07 \x01(\t\x12\x13\n\x0blogPosition\x18\x08 \x01(\x03\x12\x0f\n\x07version\x18\t \x01(\x05\x42\x0b\n\t_metadataB\x0c\n\n_dimension\"4\n\x08\x44\x61tabase\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06tenant\x18\x03 \x01(\t\"\x16\n\x06Tenant\x12\x0c\n\x04name\x18\x01 \x01(\t\"b\n\x13UpdateMetadataValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x13\n\tint_value\x18\x02 \x01(\x03H\x00\x12\x15\n\x0b\x66loat_value\x18\x03 \x01(\x01H\x00\x42\x07\n\x05value\"\x96\x01\n\x0eUpdateMetadata\x12\x36\n\x08metadata\x18\x01 \x03(\x0b\x32$.chroma.UpdateMetadata.MetadataEntry\x1aL\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0b\x32\x1b.chroma.UpdateMetadataValue:\x02\x38\x01\"\xcc\x01\n\x15SubmitEmbeddingRecord\x12\n\n\x02id\x18\x01 \x01(\t\x12#\n\x06vector\x18\x02 \x01(\x0b\x32\x0e.chroma.VectorH\x00\x88\x01\x01\x12-\n\x08metadata\x18\x03 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x01\x88\x01\x01\x12$\n\toperation\x18\x04 \x01(\x0e\x32\x11.chroma.Operation\x12\x15\n\rcollection_id\x18\x05 \x01(\tB\t\n\x07_vectorB\x0b\n\t_metadata\"S\n\x15VectorEmbeddingRecord\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06seq_id\x18\x02 \x01(\x0c\x12\x1e\n\x06vector\x18\x03 \x01(\x0b\x32\x0e.chroma.Vector\"q\n\x11VectorQueryResult\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06seq_id\x18\x02 \x01(\x0c\x12\x10\n\x08\x64istance\x18\x03 \x01(\x02\x12#\n\x06vector\x18\x04 \x01(\x0b\x32\x0e.chroma.VectorH\x00\x88\x01\x01\x42\t\n\x07_vector\"@\n\x12VectorQueryResults\x12*\n\x07results\x18\x01 
\x03(\x0b\x32\x19.chroma.VectorQueryResult\"4\n\x11GetVectorsRequest\x12\x0b\n\x03ids\x18\x01 \x03(\t\x12\x12\n\nsegment_id\x18\x02 \x01(\t\"D\n\x12GetVectorsResponse\x12.\n\x07records\x18\x01 \x03(\x0b\x32\x1d.chroma.VectorEmbeddingRecord\"\x86\x01\n\x13QueryVectorsRequest\x12\x1f\n\x07vectors\x18\x01 \x03(\x0b\x32\x0e.chroma.Vector\x12\t\n\x01k\x18\x02 \x01(\x05\x12\x13\n\x0b\x61llowed_ids\x18\x03 \x03(\t\x12\x1a\n\x12include_embeddings\x18\x04 \x01(\x08\x12\x12\n\nsegment_id\x18\x05 \x01(\t\"C\n\x14QueryVectorsResponse\x12+\n\x07results\x18\x01 \x03(\x0b\x32\x1a.chroma.VectorQueryResults*8\n\tOperation\x12\x07\n\x03\x41\x44\x44\x10\x00\x12\n\n\x06UPDATE\x10\x01\x12\n\n\x06UPSERT\x10\x02\x12\n\n\x06\x44\x45LETE\x10\x03*(\n\x0eScalarEncoding\x12\x0b\n\x07\x46LOAT32\x10\x00\x12\t\n\x05INT32\x10\x01*(\n\x0cSegmentScope\x12\n\n\x06VECTOR\x10\x00\x12\x0c\n\x08METADATA\x10\x01\x32\xa2\x01\n\x0cVectorReader\x12\x45\n\nGetVectors\x12\x19.chroma.GetVectorsRequest\x1a\x1a.chroma.GetVectorsResponse\"\x00\x12K\n\x0cQueryVectors\x12\x1b.chroma.QueryVectorsRequest\x1a\x1c.chroma.QueryVectorsResponse\"\x00\x42:Z8github.com/chroma-core/chroma/go/pkg/proto/coordinatorpbb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -21,48 +21,54 @@ if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'Z8github.com/chroma-core/chroma/go/pkg/proto/coordinatorpb' + _SEGMENT_FILEPATHSENTRY._options = None + _SEGMENT_FILEPATHSENTRY._serialized_options = b'8\001' _UPDATEMETADATA_METADATAENTRY._options = None _UPDATEMETADATA_METADATAENTRY._serialized_options = b'8\001' - _globals['_OPERATION']._serialized_start=1693 - _globals['_OPERATION']._serialized_end=1749 - _globals['_SCALARENCODING']._serialized_start=1751 - _globals['_SCALARENCODING']._serialized_end=1791 - _globals['_SEGMENTSCOPE']._serialized_start=1793 - _globals['_SEGMENTSCOPE']._serialized_end=1833 + _globals['_OPERATION']._serialized_start=1880 + _globals['_OPERATION']._serialized_end=1936 + _globals['_SCALARENCODING']._serialized_start=1938 + _globals['_SCALARENCODING']._serialized_end=1978 + _globals['_SEGMENTSCOPE']._serialized_start=1980 + _globals['_SEGMENTSCOPE']._serialized_end=2020 _globals['_STATUS']._serialized_start=39 _globals['_STATUS']._serialized_end=77 _globals['_VECTOR']._serialized_start=79 _globals['_VECTOR']._serialized_end=164 - _globals['_SEGMENT']._serialized_start=167 - _globals['_SEGMENT']._serialized_end=369 - _globals['_COLLECTION']._serialized_start=372 - _globals['_COLLECTION']._serialized_end=557 - _globals['_DATABASE']._serialized_start=559 - _globals['_DATABASE']._serialized_end=611 - _globals['_TENANT']._serialized_start=613 - _globals['_TENANT']._serialized_end=635 - _globals['_UPDATEMETADATAVALUE']._serialized_start=637 - _globals['_UPDATEMETADATAVALUE']._serialized_end=735 - _globals['_UPDATEMETADATA']._serialized_start=738 - _globals['_UPDATEMETADATA']._serialized_end=888 - _globals['_UPDATEMETADATA_METADATAENTRY']._serialized_start=812 - _globals['_UPDATEMETADATA_METADATAENTRY']._serialized_end=888 - _globals['_SUBMITEMBEDDINGRECORD']._serialized_start=891 - _globals['_SUBMITEMBEDDINGRECORD']._serialized_end=1095 - _globals['_VECTOREMBEDDINGRECORD']._serialized_start=1097 - _globals['_VECTOREMBEDDINGRECORD']._serialized_end=1180 - _globals['_VECTORQUERYRESULT']._serialized_start=1182 - _globals['_VECTORQUERYRESULT']._serialized_end=1295 - _globals['_VECTORQUERYRESULTS']._serialized_start=1297 - 
_globals['_VECTORQUERYRESULTS']._serialized_end=1361 - _globals['_GETVECTORSREQUEST']._serialized_start=1363 - _globals['_GETVECTORSREQUEST']._serialized_end=1415 - _globals['_GETVECTORSRESPONSE']._serialized_start=1417 - _globals['_GETVECTORSRESPONSE']._serialized_end=1485 - _globals['_QUERYVECTORSREQUEST']._serialized_start=1488 - _globals['_QUERYVECTORSREQUEST']._serialized_end=1622 - _globals['_QUERYVECTORSRESPONSE']._serialized_start=1624 - _globals['_QUERYVECTORSRESPONSE']._serialized_end=1691 - _globals['_VECTORREADER']._serialized_start=1836 - _globals['_VECTORREADER']._serialized_end=1998 + _globals['_FILEPATHS']._serialized_start=166 + _globals['_FILEPATHS']._serialized_end=192 + _globals['_SEGMENT']._serialized_start=195 + _globals['_SEGMENT']._serialized_end=518 + _globals['_SEGMENT_FILEPATHSENTRY']._serialized_start=413 + _globals['_SEGMENT_FILEPATHSENTRY']._serialized_end=480 + _globals['_COLLECTION']._serialized_start=521 + _globals['_COLLECTION']._serialized_end=744 + _globals['_DATABASE']._serialized_start=746 + _globals['_DATABASE']._serialized_end=798 + _globals['_TENANT']._serialized_start=800 + _globals['_TENANT']._serialized_end=822 + _globals['_UPDATEMETADATAVALUE']._serialized_start=824 + _globals['_UPDATEMETADATAVALUE']._serialized_end=922 + _globals['_UPDATEMETADATA']._serialized_start=925 + _globals['_UPDATEMETADATA']._serialized_end=1075 + _globals['_UPDATEMETADATA_METADATAENTRY']._serialized_start=999 + _globals['_UPDATEMETADATA_METADATAENTRY']._serialized_end=1075 + _globals['_SUBMITEMBEDDINGRECORD']._serialized_start=1078 + _globals['_SUBMITEMBEDDINGRECORD']._serialized_end=1282 + _globals['_VECTOREMBEDDINGRECORD']._serialized_start=1284 + _globals['_VECTOREMBEDDINGRECORD']._serialized_end=1367 + _globals['_VECTORQUERYRESULT']._serialized_start=1369 + _globals['_VECTORQUERYRESULT']._serialized_end=1482 + _globals['_VECTORQUERYRESULTS']._serialized_start=1484 + _globals['_VECTORQUERYRESULTS']._serialized_end=1548 + _globals['_GETVECTORSREQUEST']._serialized_start=1550 + _globals['_GETVECTORSREQUEST']._serialized_end=1602 + _globals['_GETVECTORSRESPONSE']._serialized_start=1604 + _globals['_GETVECTORSRESPONSE']._serialized_end=1672 + _globals['_QUERYVECTORSREQUEST']._serialized_start=1675 + _globals['_QUERYVECTORSREQUEST']._serialized_end=1809 + _globals['_QUERYVECTORSRESPONSE']._serialized_start=1811 + _globals['_QUERYVECTORSRESPONSE']._serialized_end=1878 + _globals['_VECTORREADER']._serialized_start=2023 + _globals['_VECTORREADER']._serialized_end=2185 # @@protoc_insertion_point(module_scope) diff --git a/chromadb/proto/chroma_pb2.pyi b/chromadb/proto/chroma_pb2.pyi index 9fb730ca6d9..6e8b267a584 100644 --- a/chromadb/proto/chroma_pb2.pyi +++ b/chromadb/proto/chroma_pb2.pyi @@ -49,24 +49,39 @@ class Vector(_message.Message): encoding: ScalarEncoding def __init__(self, dimension: _Optional[int] = ..., vector: _Optional[bytes] = ..., encoding: _Optional[_Union[ScalarEncoding, str]] = ...) -> None: ... +class FilePaths(_message.Message): + __slots__ = ["paths"] + PATHS_FIELD_NUMBER: _ClassVar[int] + paths: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, paths: _Optional[_Iterable[str]] = ...) -> None: ... 
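The new `FilePaths` message added to chroma.proto is a thin wrapper around a repeated string field. As a hedged illustration of the regenerated bindings (the concrete paths below are placeholders, not values from this change), it can be constructed directly from `chromadb.proto.chroma_pb2`:

```python
from chromadb.proto import chroma_pb2

# FilePaths holds a flat list of file paths for one segment artifact.
# These example paths are placeholders for illustration only.
fp = chroma_pb2.FilePaths(paths=["segment/header.bin", "segment/data.bin"])
assert list(fp.paths) == ["segment/header.bin", "segment/data.bin"]
```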
+ class Segment(_message.Message): - __slots__ = ["id", "type", "scope", "topic", "collection", "metadata"] + __slots__ = ["id", "type", "scope", "topic", "collection", "metadata", "file_paths"] + class FilePathsEntry(_message.Message): + __slots__ = ["key", "value"] + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: FilePaths + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[FilePaths, _Mapping]] = ...) -> None: ... ID_FIELD_NUMBER: _ClassVar[int] TYPE_FIELD_NUMBER: _ClassVar[int] SCOPE_FIELD_NUMBER: _ClassVar[int] TOPIC_FIELD_NUMBER: _ClassVar[int] COLLECTION_FIELD_NUMBER: _ClassVar[int] METADATA_FIELD_NUMBER: _ClassVar[int] + FILE_PATHS_FIELD_NUMBER: _ClassVar[int] id: str type: str scope: SegmentScope topic: str collection: str metadata: UpdateMetadata - def __init__(self, id: _Optional[str] = ..., type: _Optional[str] = ..., scope: _Optional[_Union[SegmentScope, str]] = ..., topic: _Optional[str] = ..., collection: _Optional[str] = ..., metadata: _Optional[_Union[UpdateMetadata, _Mapping]] = ...) -> None: ... + file_paths: _containers.MessageMap[str, FilePaths] + def __init__(self, id: _Optional[str] = ..., type: _Optional[str] = ..., scope: _Optional[_Union[SegmentScope, str]] = ..., topic: _Optional[str] = ..., collection: _Optional[str] = ..., metadata: _Optional[_Union[UpdateMetadata, _Mapping]] = ..., file_paths: _Optional[_Mapping[str, FilePaths]] = ...) -> None: ... class Collection(_message.Message): - __slots__ = ["id", "name", "topic", "metadata", "dimension", "tenant", "database"] + __slots__ = ["id", "name", "topic", "metadata", "dimension", "tenant", "database", "logPosition", "version"] ID_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] TOPIC_FIELD_NUMBER: _ClassVar[int] @@ -74,6 +89,8 @@ class Collection(_message.Message): DIMENSION_FIELD_NUMBER: _ClassVar[int] TENANT_FIELD_NUMBER: _ClassVar[int] DATABASE_FIELD_NUMBER: _ClassVar[int] + LOGPOSITION_FIELD_NUMBER: _ClassVar[int] + VERSION_FIELD_NUMBER: _ClassVar[int] id: str name: str topic: str @@ -81,7 +98,9 @@ class Collection(_message.Message): dimension: int tenant: str database: str - def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., topic: _Optional[str] = ..., metadata: _Optional[_Union[UpdateMetadata, _Mapping]] = ..., dimension: _Optional[int] = ..., tenant: _Optional[str] = ..., database: _Optional[str] = ...) -> None: ... + logPosition: int + version: int + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., topic: _Optional[str] = ..., metadata: _Optional[_Union[UpdateMetadata, _Mapping]] = ..., dimension: _Optional[int] = ..., tenant: _Optional[str] = ..., database: _Optional[str] = ..., logPosition: _Optional[int] = ..., version: _Optional[int] = ...) -> None: ... 
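`Segment` now carries a `file_paths` map from string keys to `FilePaths`, and `Collection` gains `logPosition` (int64) and `version` (int32). A minimal sketch of populating the regenerated classes; every id, name, and map key below is an illustrative placeholder:

```python
from chromadb.proto import chroma_pb2

segment = chroma_pb2.Segment(
    id="segment-uuid",        # placeholder id
    type="example-type",      # placeholder type string
    scope=chroma_pb2.VECTOR,  # file-level SegmentScope enum value
)
# Message-valued map entries are mutated in place, not assigned.
segment.file_paths["example-key"].paths.extend(["segment/index.bin"])

collection = chroma_pb2.Collection(
    id="collection-uuid",
    name="example-collection",
    topic="example-topic",
    tenant="default",
    database="default",
    logPosition=42,  # new int64 field added in this change
    version=1,       # new int32 field added in this change
)
```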
class Database(_message.Message): __slots__ = ["id", "name", "tenant"] diff --git a/chromadb/proto/coordinator_pb2.py b/chromadb/proto/coordinator_pb2.py index 301c1c2f4f7..7264a86f038 100644 --- a/chromadb/proto/coordinator_pb2.py +++ b/chromadb/proto/coordinator_pb2.py @@ -15,7 +15,7 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n chromadb/proto/coordinator.proto\x12\x06\x63hroma\x1a\x1b\x63hromadb/proto/chroma.proto\x1a\x1bgoogle/protobuf/empty.proto\"A\n\x15\x43reateDatabaseRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06tenant\x18\x03 \x01(\t\"8\n\x16\x43reateDatabaseResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"2\n\x12GetDatabaseRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06tenant\x18\x02 \x01(\t\"Y\n\x13GetDatabaseResponse\x12\"\n\x08\x64\x61tabase\x18\x01 \x01(\x0b\x32\x10.chroma.Database\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"#\n\x13\x43reateTenantRequest\x12\x0c\n\x04name\x18\x02 \x01(\t\"6\n\x14\x43reateTenantResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\" \n\x10GetTenantRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"S\n\x11GetTenantResponse\x12\x1e\n\x06tenant\x18\x01 \x01(\x0b\x32\x0e.chroma.Tenant\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"8\n\x14\x43reateSegmentRequest\x12 \n\x07segment\x18\x01 \x01(\x0b\x32\x0f.chroma.Segment\"7\n\x15\x43reateSegmentResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\"\n\x14\x44\x65leteSegmentRequest\x12\n\n\x02id\x18\x01 \x01(\t\"7\n\x15\x44\x65leteSegmentResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\xc2\x01\n\x12GetSegmentsRequest\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x11\n\x04type\x18\x02 \x01(\tH\x01\x88\x01\x01\x12(\n\x05scope\x18\x03 \x01(\x0e\x32\x14.chroma.SegmentScopeH\x02\x88\x01\x01\x12\x12\n\x05topic\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\ncollection\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x05\n\x03_idB\x07\n\x05_typeB\x08\n\x06_scopeB\x08\n\x06_topicB\r\n\x0b_collection\"X\n\x13GetSegmentsResponse\x12!\n\x08segments\x18\x01 \x03(\x0b\x32\x0f.chroma.Segment\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"\xfa\x01\n\x14UpdateSegmentRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x05topic\x18\x02 \x01(\tH\x00\x12\x15\n\x0breset_topic\x18\x03 \x01(\x08H\x00\x12\x14\n\ncollection\x18\x04 \x01(\tH\x01\x12\x1a\n\x10reset_collection\x18\x05 \x01(\x08H\x01\x12*\n\x08metadata\x18\x06 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x02\x12\x18\n\x0ereset_metadata\x18\x07 \x01(\x08H\x02\x42\x0e\n\x0ctopic_updateB\x13\n\x11\x63ollection_updateB\x11\n\x0fmetadata_update\"7\n\x15UpdateSegmentResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\xe5\x01\n\x17\x43reateCollectionRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x08metadata\x18\x03 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x00\x88\x01\x01\x12\x16\n\tdimension\x18\x04 \x01(\x05H\x01\x88\x01\x01\x12\x1a\n\rget_or_create\x18\x05 \x01(\x08H\x02\x88\x01\x01\x12\x0e\n\x06tenant\x18\x06 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x07 \x01(\tB\x0b\n\t_metadataB\x0c\n\n_dimensionB\x10\n\x0e_get_or_create\"s\n\x18\x43reateCollectionResponse\x12&\n\ncollection\x18\x01 \x01(\x0b\x32\x12.chroma.Collection\x12\x0f\n\x07\x63reated\x18\x02 \x01(\x08\x12\x1e\n\x06status\x18\x03 \x01(\x0b\x32\x0e.chroma.Status\"G\n\x17\x44\x65leteCollectionRequest\x12\n\n\x02id\x18\x01 
\x01(\t\x12\x0e\n\x06tenant\x18\x02 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x03 \x01(\t\":\n\x18\x44\x65leteCollectionResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\x8b\x01\n\x15GetCollectionsRequest\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x11\n\x04name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x12\n\x05topic\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x0e\n\x06tenant\x18\x04 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x05 \x01(\tB\x05\n\x03_idB\x07\n\x05_nameB\x08\n\x06_topic\"a\n\x16GetCollectionsResponse\x12\'\n\x0b\x63ollections\x18\x01 \x03(\x0b\x32\x12.chroma.Collection\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"\xde\x01\n\x17UpdateCollectionRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x12\n\x05topic\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tdimension\x18\x04 \x01(\x05H\x03\x88\x01\x01\x12*\n\x08metadata\x18\x05 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x00\x12\x18\n\x0ereset_metadata\x18\x06 \x01(\x08H\x00\x42\x11\n\x0fmetadata_updateB\x08\n\x06_topicB\x07\n\x05_nameB\x0c\n\n_dimension\":\n\x18UpdateCollectionResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"O\n\x0cNotification\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x0c\n\x04type\x18\x03 \x01(\t\x12\x0e\n\x06status\x18\x04 \x01(\t\"4\n\x12ResetStateResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\":\n%GetLastCompactionTimeForTenantRequest\x12\x11\n\ttenant_id\x18\x01 \x03(\t\"K\n\x18TenantLastCompactionTime\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x1c\n\x14last_compaction_time\x18\x02 \x01(\x03\"o\n&GetLastCompactionTimeForTenantResponse\x12\x45\n\x1btenant_last_compaction_time\x18\x01 \x03(\x0b\x32 .chroma.TenantLastCompactionTime\"n\n%SetLastCompactionTimeForTenantRequest\x12\x45\n\x1btenant_last_compaction_time\x18\x01 \x01(\x0b\x32 .chroma.TenantLastCompactionTime2\x80\n\n\x05SysDB\x12Q\n\x0e\x43reateDatabase\x12\x1d.chroma.CreateDatabaseRequest\x1a\x1e.chroma.CreateDatabaseResponse\"\x00\x12H\n\x0bGetDatabase\x12\x1a.chroma.GetDatabaseRequest\x1a\x1b.chroma.GetDatabaseResponse\"\x00\x12K\n\x0c\x43reateTenant\x12\x1b.chroma.CreateTenantRequest\x1a\x1c.chroma.CreateTenantResponse\"\x00\x12\x42\n\tGetTenant\x12\x18.chroma.GetTenantRequest\x1a\x19.chroma.GetTenantResponse\"\x00\x12N\n\rCreateSegment\x12\x1c.chroma.CreateSegmentRequest\x1a\x1d.chroma.CreateSegmentResponse\"\x00\x12N\n\rDeleteSegment\x12\x1c.chroma.DeleteSegmentRequest\x1a\x1d.chroma.DeleteSegmentResponse\"\x00\x12H\n\x0bGetSegments\x12\x1a.chroma.GetSegmentsRequest\x1a\x1b.chroma.GetSegmentsResponse\"\x00\x12N\n\rUpdateSegment\x12\x1c.chroma.UpdateSegmentRequest\x1a\x1d.chroma.UpdateSegmentResponse\"\x00\x12W\n\x10\x43reateCollection\x12\x1f.chroma.CreateCollectionRequest\x1a .chroma.CreateCollectionResponse\"\x00\x12W\n\x10\x44\x65leteCollection\x12\x1f.chroma.DeleteCollectionRequest\x1a .chroma.DeleteCollectionResponse\"\x00\x12Q\n\x0eGetCollections\x12\x1d.chroma.GetCollectionsRequest\x1a\x1e.chroma.GetCollectionsResponse\"\x00\x12W\n\x10UpdateCollection\x12\x1f.chroma.UpdateCollectionRequest\x1a 
.chroma.UpdateCollectionResponse\"\x00\x12\x42\n\nResetState\x12\x16.google.protobuf.Empty\x1a\x1a.chroma.ResetStateResponse\"\x00\x12\x81\x01\n\x1eGetLastCompactionTimeForTenant\x12-.chroma.GetLastCompactionTimeForTenantRequest\x1a..chroma.GetLastCompactionTimeForTenantResponse\"\x00\x12i\n\x1eSetLastCompactionTimeForTenant\x12-.chroma.SetLastCompactionTimeForTenantRequest\x1a\x16.google.protobuf.Empty\"\x00\x42:Z8github.com/chroma-core/chroma/go/pkg/proto/coordinatorpbb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n chromadb/proto/coordinator.proto\x12\x06\x63hroma\x1a\x1b\x63hromadb/proto/chroma.proto\x1a\x1bgoogle/protobuf/empty.proto\"A\n\x15\x43reateDatabaseRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06tenant\x18\x03 \x01(\t\"8\n\x16\x43reateDatabaseResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"2\n\x12GetDatabaseRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06tenant\x18\x02 \x01(\t\"Y\n\x13GetDatabaseResponse\x12\"\n\x08\x64\x61tabase\x18\x01 \x01(\x0b\x32\x10.chroma.Database\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"#\n\x13\x43reateTenantRequest\x12\x0c\n\x04name\x18\x02 \x01(\t\"6\n\x14\x43reateTenantResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\" \n\x10GetTenantRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"S\n\x11GetTenantResponse\x12\x1e\n\x06tenant\x18\x01 \x01(\x0b\x32\x0e.chroma.Tenant\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"8\n\x14\x43reateSegmentRequest\x12 \n\x07segment\x18\x01 \x01(\x0b\x32\x0f.chroma.Segment\"7\n\x15\x43reateSegmentResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\"\n\x14\x44\x65leteSegmentRequest\x12\n\n\x02id\x18\x01 \x01(\t\"7\n\x15\x44\x65leteSegmentResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\xc2\x01\n\x12GetSegmentsRequest\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x11\n\x04type\x18\x02 \x01(\tH\x01\x88\x01\x01\x12(\n\x05scope\x18\x03 \x01(\x0e\x32\x14.chroma.SegmentScopeH\x02\x88\x01\x01\x12\x12\n\x05topic\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\ncollection\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x05\n\x03_idB\x07\n\x05_typeB\x08\n\x06_scopeB\x08\n\x06_topicB\r\n\x0b_collection\"X\n\x13GetSegmentsResponse\x12!\n\x08segments\x18\x01 \x03(\x0b\x32\x0f.chroma.Segment\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"\xfa\x01\n\x14UpdateSegmentRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x05topic\x18\x02 \x01(\tH\x00\x12\x15\n\x0breset_topic\x18\x03 \x01(\x08H\x00\x12\x14\n\ncollection\x18\x04 \x01(\tH\x01\x12\x1a\n\x10reset_collection\x18\x05 \x01(\x08H\x01\x12*\n\x08metadata\x18\x06 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x02\x12\x18\n\x0ereset_metadata\x18\x07 \x01(\x08H\x02\x42\x0e\n\x0ctopic_updateB\x13\n\x11\x63ollection_updateB\x11\n\x0fmetadata_update\"7\n\x15UpdateSegmentResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\xe5\x01\n\x17\x43reateCollectionRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x08metadata\x18\x03 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x00\x88\x01\x01\x12\x16\n\tdimension\x18\x04 \x01(\x05H\x01\x88\x01\x01\x12\x1a\n\rget_or_create\x18\x05 \x01(\x08H\x02\x88\x01\x01\x12\x0e\n\x06tenant\x18\x06 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x07 \x01(\tB\x0b\n\t_metadataB\x0c\n\n_dimensionB\x10\n\x0e_get_or_create\"s\n\x18\x43reateCollectionResponse\x12&\n\ncollection\x18\x01 \x01(\x0b\x32\x12.chroma.Collection\x12\x0f\n\x07\x63reated\x18\x02 
\x01(\x08\x12\x1e\n\x06status\x18\x03 \x01(\x0b\x32\x0e.chroma.Status\"G\n\x17\x44\x65leteCollectionRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06tenant\x18\x02 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x03 \x01(\t\":\n\x18\x44\x65leteCollectionResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\x8b\x01\n\x15GetCollectionsRequest\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x11\n\x04name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x12\n\x05topic\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x0e\n\x06tenant\x18\x04 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x05 \x01(\tB\x05\n\x03_idB\x07\n\x05_nameB\x08\n\x06_topic\"a\n\x16GetCollectionsResponse\x12\'\n\x0b\x63ollections\x18\x01 \x03(\x0b\x32\x12.chroma.Collection\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"\xde\x01\n\x17UpdateCollectionRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x12\n\x05topic\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tdimension\x18\x04 \x01(\x05H\x03\x88\x01\x01\x12*\n\x08metadata\x18\x05 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x00\x12\x18\n\x0ereset_metadata\x18\x06 \x01(\x08H\x00\x42\x11\n\x0fmetadata_updateB\x08\n\x06_topicB\x07\n\x05_nameB\x0c\n\n_dimension\":\n\x18UpdateCollectionResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"O\n\x0cNotification\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x0c\n\x04type\x18\x03 \x01(\t\x12\x0e\n\x06status\x18\x04 \x01(\t\"4\n\x12ResetStateResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\":\n%GetLastCompactionTimeForTenantRequest\x12\x11\n\ttenant_id\x18\x01 \x03(\t\"K\n\x18TenantLastCompactionTime\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x1c\n\x14last_compaction_time\x18\x02 \x01(\x03\"o\n&GetLastCompactionTimeForTenantResponse\x12\x45\n\x1btenant_last_compaction_time\x18\x01 \x03(\x0b\x32 .chroma.TenantLastCompactionTime\"n\n%SetLastCompactionTimeForTenantRequest\x12\x45\n\x1btenant_last_compaction_time\x18\x01 \x01(\x0b\x32 .chroma.TenantLastCompactionTime\"\xbc\x01\n\x1a\x46lushSegmentCompactionInfo\x12\x12\n\nsegment_id\x18\x01 \x01(\t\x12\x45\n\nfile_paths\x18\x02 \x03(\x0b\x32\x31.chroma.FlushSegmentCompactionInfo.FilePathsEntry\x1a\x43\n\x0e\x46ilePathsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 \x01(\x0b\x32\x11.chroma.FilePaths:\x02\x38\x01\"\xc3\x01\n FlushCollectionCompactionRequest\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x14\n\x0clog_position\x18\x03 \x01(\x03\x12\x1a\n\x12\x63ollection_version\x18\x04 \x01(\x05\x12\x43\n\x17segment_compaction_info\x18\x05 \x03(\x0b\x32\".chroma.FlushSegmentCompactionInfo\"t\n!FlushCollectionCompactionResponse\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12\x1a\n\x12\x63ollection_version\x18\x02 \x01(\x05\x12\x1c\n\x14last_compaction_time\x18\x03 
\x01(\x03\x32\xf4\n\n\x05SysDB\x12Q\n\x0e\x43reateDatabase\x12\x1d.chroma.CreateDatabaseRequest\x1a\x1e.chroma.CreateDatabaseResponse\"\x00\x12H\n\x0bGetDatabase\x12\x1a.chroma.GetDatabaseRequest\x1a\x1b.chroma.GetDatabaseResponse\"\x00\x12K\n\x0c\x43reateTenant\x12\x1b.chroma.CreateTenantRequest\x1a\x1c.chroma.CreateTenantResponse\"\x00\x12\x42\n\tGetTenant\x12\x18.chroma.GetTenantRequest\x1a\x19.chroma.GetTenantResponse\"\x00\x12N\n\rCreateSegment\x12\x1c.chroma.CreateSegmentRequest\x1a\x1d.chroma.CreateSegmentResponse\"\x00\x12N\n\rDeleteSegment\x12\x1c.chroma.DeleteSegmentRequest\x1a\x1d.chroma.DeleteSegmentResponse\"\x00\x12H\n\x0bGetSegments\x12\x1a.chroma.GetSegmentsRequest\x1a\x1b.chroma.GetSegmentsResponse\"\x00\x12N\n\rUpdateSegment\x12\x1c.chroma.UpdateSegmentRequest\x1a\x1d.chroma.UpdateSegmentResponse\"\x00\x12W\n\x10\x43reateCollection\x12\x1f.chroma.CreateCollectionRequest\x1a .chroma.CreateCollectionResponse\"\x00\x12W\n\x10\x44\x65leteCollection\x12\x1f.chroma.DeleteCollectionRequest\x1a .chroma.DeleteCollectionResponse\"\x00\x12Q\n\x0eGetCollections\x12\x1d.chroma.GetCollectionsRequest\x1a\x1e.chroma.GetCollectionsResponse\"\x00\x12W\n\x10UpdateCollection\x12\x1f.chroma.UpdateCollectionRequest\x1a .chroma.UpdateCollectionResponse\"\x00\x12\x42\n\nResetState\x12\x16.google.protobuf.Empty\x1a\x1a.chroma.ResetStateResponse\"\x00\x12\x81\x01\n\x1eGetLastCompactionTimeForTenant\x12-.chroma.GetLastCompactionTimeForTenantRequest\x1a..chroma.GetLastCompactionTimeForTenantResponse\"\x00\x12i\n\x1eSetLastCompactionTimeForTenant\x12-.chroma.SetLastCompactionTimeForTenantRequest\x1a\x16.google.protobuf.Empty\"\x00\x12r\n\x19\x46lushCollectionCompaction\x12(.chroma.FlushCollectionCompactionRequest\x1a).chroma.FlushCollectionCompactionResponse\"\x00\x42:Z8github.com/chroma-core/chroma/go/pkg/proto/coordinatorpbb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -23,6 +23,8 @@ if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'Z8github.com/chroma-core/chroma/go/pkg/proto/coordinatorpb' + _FLUSHSEGMENTCOMPACTIONINFO_FILEPATHSENTRY._options = None + _FLUSHSEGMENTCOMPACTIONINFO_FILEPATHSENTRY._serialized_options = b'8\001' _globals['_CREATEDATABASEREQUEST']._serialized_start=102 _globals['_CREATEDATABASEREQUEST']._serialized_end=167 _globals['_CREATEDATABASERESPONSE']._serialized_start=169 @@ -83,6 +85,14 @@ _globals['_GETLASTCOMPACTIONTIMEFORTENANTRESPONSE']._serialized_end=2778 _globals['_SETLASTCOMPACTIONTIMEFORTENANTREQUEST']._serialized_start=2780 _globals['_SETLASTCOMPACTIONTIMEFORTENANTREQUEST']._serialized_end=2890 - _globals['_SYSDB']._serialized_start=2893 - _globals['_SYSDB']._serialized_end=4173 + _globals['_FLUSHSEGMENTCOMPACTIONINFO']._serialized_start=2893 + _globals['_FLUSHSEGMENTCOMPACTIONINFO']._serialized_end=3081 + _globals['_FLUSHSEGMENTCOMPACTIONINFO_FILEPATHSENTRY']._serialized_start=3014 + _globals['_FLUSHSEGMENTCOMPACTIONINFO_FILEPATHSENTRY']._serialized_end=3081 + _globals['_FLUSHCOLLECTIONCOMPACTIONREQUEST']._serialized_start=3084 + _globals['_FLUSHCOLLECTIONCOMPACTIONREQUEST']._serialized_end=3279 + _globals['_FLUSHCOLLECTIONCOMPACTIONRESPONSE']._serialized_start=3281 + _globals['_FLUSHCOLLECTIONCOMPACTIONRESPONSE']._serialized_end=3397 + _globals['_SYSDB']._serialized_start=3400 + _globals['_SYSDB']._serialized_end=4796 # @@protoc_insertion_point(module_scope) diff --git a/chromadb/proto/coordinator_pb2.pyi b/chromadb/proto/coordinator_pb2.pyi index 
185a41b901a..6175b63917e 100644 --- a/chromadb/proto/coordinator_pb2.pyi +++ b/chromadb/proto/coordinator_pb2.pyi @@ -266,3 +266,42 @@ class SetLastCompactionTimeForTenantRequest(_message.Message): TENANT_LAST_COMPACTION_TIME_FIELD_NUMBER: _ClassVar[int] tenant_last_compaction_time: TenantLastCompactionTime def __init__(self, tenant_last_compaction_time: _Optional[_Union[TenantLastCompactionTime, _Mapping]] = ...) -> None: ... + +class FlushSegmentCompactionInfo(_message.Message): + __slots__ = ["segment_id", "file_paths"] + class FilePathsEntry(_message.Message): + __slots__ = ["key", "value"] + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: _chroma_pb2.FilePaths + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[_chroma_pb2.FilePaths, _Mapping]] = ...) -> None: ... + SEGMENT_ID_FIELD_NUMBER: _ClassVar[int] + FILE_PATHS_FIELD_NUMBER: _ClassVar[int] + segment_id: str + file_paths: _containers.MessageMap[str, _chroma_pb2.FilePaths] + def __init__(self, segment_id: _Optional[str] = ..., file_paths: _Optional[_Mapping[str, _chroma_pb2.FilePaths]] = ...) -> None: ... + +class FlushCollectionCompactionRequest(_message.Message): + __slots__ = ["tenant_id", "collection_id", "log_position", "collection_version", "segment_compaction_info"] + TENANT_ID_FIELD_NUMBER: _ClassVar[int] + COLLECTION_ID_FIELD_NUMBER: _ClassVar[int] + LOG_POSITION_FIELD_NUMBER: _ClassVar[int] + COLLECTION_VERSION_FIELD_NUMBER: _ClassVar[int] + SEGMENT_COMPACTION_INFO_FIELD_NUMBER: _ClassVar[int] + tenant_id: str + collection_id: str + log_position: int + collection_version: int + segment_compaction_info: _containers.RepeatedCompositeFieldContainer[FlushSegmentCompactionInfo] + def __init__(self, tenant_id: _Optional[str] = ..., collection_id: _Optional[str] = ..., log_position: _Optional[int] = ..., collection_version: _Optional[int] = ..., segment_compaction_info: _Optional[_Iterable[_Union[FlushSegmentCompactionInfo, _Mapping]]] = ...) -> None: ... + +class FlushCollectionCompactionResponse(_message.Message): + __slots__ = ["collection_id", "collection_version", "last_compaction_time"] + COLLECTION_ID_FIELD_NUMBER: _ClassVar[int] + COLLECTION_VERSION_FIELD_NUMBER: _ClassVar[int] + LAST_COMPACTION_TIME_FIELD_NUMBER: _ClassVar[int] + collection_id: str + collection_version: int + last_compaction_time: int + def __init__(self, collection_id: _Optional[str] = ..., collection_version: _Optional[int] = ..., last_compaction_time: _Optional[int] = ...) -> None: ... diff --git a/chromadb/proto/coordinator_pb2_grpc.py b/chromadb/proto/coordinator_pb2_grpc.py index 74bcba4c8d8..92ede663915 100644 --- a/chromadb/proto/coordinator_pb2_grpc.py +++ b/chromadb/proto/coordinator_pb2_grpc.py @@ -16,80 +16,85 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.CreateDatabase = channel.unary_unary( - "/chroma.SysDB/CreateDatabase", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateDatabaseRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateDatabaseResponse.FromString, - ) + '/chroma.SysDB/CreateDatabase', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateDatabaseRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateDatabaseResponse.FromString, + ) self.GetDatabase = channel.unary_unary( - "/chroma.SysDB/GetDatabase", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetDatabaseRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetDatabaseResponse.FromString, - ) + '/chroma.SysDB/GetDatabase', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetDatabaseRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetDatabaseResponse.FromString, + ) self.CreateTenant = channel.unary_unary( - "/chroma.SysDB/CreateTenant", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateTenantRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateTenantResponse.FromString, - ) + '/chroma.SysDB/CreateTenant', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateTenantRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateTenantResponse.FromString, + ) self.GetTenant = channel.unary_unary( - "/chroma.SysDB/GetTenant", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetTenantRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetTenantResponse.FromString, - ) + '/chroma.SysDB/GetTenant', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetTenantRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetTenantResponse.FromString, + ) self.CreateSegment = channel.unary_unary( - "/chroma.SysDB/CreateSegment", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateSegmentRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateSegmentResponse.FromString, - ) + '/chroma.SysDB/CreateSegment', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateSegmentRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateSegmentResponse.FromString, + ) self.DeleteSegment = channel.unary_unary( - "/chroma.SysDB/DeleteSegment", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteSegmentRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteSegmentResponse.FromString, - ) + '/chroma.SysDB/DeleteSegment', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteSegmentRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteSegmentResponse.FromString, + ) self.GetSegments = channel.unary_unary( - "/chroma.SysDB/GetSegments", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetSegmentsRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetSegmentsResponse.FromString, - ) + '/chroma.SysDB/GetSegments', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetSegmentsRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetSegmentsResponse.FromString, + ) 
self.UpdateSegment = channel.unary_unary( - "/chroma.SysDB/UpdateSegment", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateSegmentRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateSegmentResponse.FromString, - ) + '/chroma.SysDB/UpdateSegment', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateSegmentRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateSegmentResponse.FromString, + ) self.CreateCollection = channel.unary_unary( - "/chroma.SysDB/CreateCollection", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateCollectionRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateCollectionResponse.FromString, - ) + '/chroma.SysDB/CreateCollection', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateCollectionRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateCollectionResponse.FromString, + ) self.DeleteCollection = channel.unary_unary( - "/chroma.SysDB/DeleteCollection", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteCollectionRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteCollectionResponse.FromString, - ) + '/chroma.SysDB/DeleteCollection', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteCollectionRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteCollectionResponse.FromString, + ) self.GetCollections = channel.unary_unary( - "/chroma.SysDB/GetCollections", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetCollectionsRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetCollectionsResponse.FromString, - ) + '/chroma.SysDB/GetCollections', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetCollectionsRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetCollectionsResponse.FromString, + ) self.UpdateCollection = channel.unary_unary( - "/chroma.SysDB/UpdateCollection", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateCollectionRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateCollectionResponse.FromString, - ) + '/chroma.SysDB/UpdateCollection', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateCollectionRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateCollectionResponse.FromString, + ) self.ResetState = channel.unary_unary( - "/chroma.SysDB/ResetState", - request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.ResetStateResponse.FromString, - ) + '/chroma.SysDB/ResetState', + request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.ResetStateResponse.FromString, + ) self.GetLastCompactionTimeForTenant = channel.unary_unary( - "/chroma.SysDB/GetLastCompactionTimeForTenant", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetLastCompactionTimeForTenantRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetLastCompactionTimeForTenantResponse.FromString, - ) + '/chroma.SysDB/GetLastCompactionTimeForTenant', + 
request_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetLastCompactionTimeForTenantRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetLastCompactionTimeForTenantResponse.FromString, + ) self.SetLastCompactionTimeForTenant = channel.unary_unary( - "/chroma.SysDB/SetLastCompactionTimeForTenant", - request_serializer=chromadb_dot_proto_dot_coordinator__pb2.SetLastCompactionTimeForTenantRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) + '/chroma.SysDB/SetLastCompactionTimeForTenant', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.SetLastCompactionTimeForTenantRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.FlushCollectionCompaction = channel.unary_unary( + '/chroma.SysDB/FlushCollectionCompaction', + request_serializer=chromadb_dot_proto_dot_coordinator__pb2.FlushCollectionCompactionRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_coordinator__pb2.FlushCollectionCompactionResponse.FromString, + ) class SysDBServicer(object): @@ -98,613 +103,460 @@ class SysDBServicer(object): def CreateDatabase(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def GetDatabase(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def CreateTenant(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def GetTenant(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def CreateSegment(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def DeleteSegment(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def GetSegments(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not 
implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def UpdateSegment(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def CreateCollection(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def DeleteCollection(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def GetCollections(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def UpdateCollection(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def ResetState(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def GetLastCompactionTimeForTenant(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def SetLastCompactionTimeForTenant(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def FlushCollectionCompaction(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def add_SysDBServicer_to_server(servicer, server): rpc_method_handlers = { - "CreateDatabase": grpc.unary_unary_rpc_method_handler( - servicer.CreateDatabase, - 
request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateDatabaseRequest.FromString, - response_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateDatabaseResponse.SerializeToString, - ), - "GetDatabase": grpc.unary_unary_rpc_method_handler( - servicer.GetDatabase, - request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetDatabaseRequest.FromString, - response_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetDatabaseResponse.SerializeToString, - ), - "CreateTenant": grpc.unary_unary_rpc_method_handler( - servicer.CreateTenant, - request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateTenantRequest.FromString, - response_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateTenantResponse.SerializeToString, - ), - "GetTenant": grpc.unary_unary_rpc_method_handler( - servicer.GetTenant, - request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetTenantRequest.FromString, - response_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetTenantResponse.SerializeToString, - ), - "CreateSegment": grpc.unary_unary_rpc_method_handler( - servicer.CreateSegment, - request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateSegmentRequest.FromString, - response_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateSegmentResponse.SerializeToString, - ), - "DeleteSegment": grpc.unary_unary_rpc_method_handler( - servicer.DeleteSegment, - request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteSegmentRequest.FromString, - response_serializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteSegmentResponse.SerializeToString, - ), - "GetSegments": grpc.unary_unary_rpc_method_handler( - servicer.GetSegments, - request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetSegmentsRequest.FromString, - response_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetSegmentsResponse.SerializeToString, - ), - "UpdateSegment": grpc.unary_unary_rpc_method_handler( - servicer.UpdateSegment, - request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateSegmentRequest.FromString, - response_serializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateSegmentResponse.SerializeToString, - ), - "CreateCollection": grpc.unary_unary_rpc_method_handler( - servicer.CreateCollection, - request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateCollectionRequest.FromString, - response_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateCollectionResponse.SerializeToString, - ), - "DeleteCollection": grpc.unary_unary_rpc_method_handler( - servicer.DeleteCollection, - request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteCollectionRequest.FromString, - response_serializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteCollectionResponse.SerializeToString, - ), - "GetCollections": grpc.unary_unary_rpc_method_handler( - servicer.GetCollections, - request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetCollectionsRequest.FromString, - response_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetCollectionsResponse.SerializeToString, - ), - "UpdateCollection": grpc.unary_unary_rpc_method_handler( - servicer.UpdateCollection, - request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateCollectionRequest.FromString, - response_serializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateCollectionResponse.SerializeToString, - ), - "ResetState": grpc.unary_unary_rpc_method_handler( - servicer.ResetState, - request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - 
response_serializer=chromadb_dot_proto_dot_coordinator__pb2.ResetStateResponse.SerializeToString, - ), - "GetLastCompactionTimeForTenant": grpc.unary_unary_rpc_method_handler( - servicer.GetLastCompactionTimeForTenant, - request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetLastCompactionTimeForTenantRequest.FromString, - response_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetLastCompactionTimeForTenantResponse.SerializeToString, - ), - "SetLastCompactionTimeForTenant": grpc.unary_unary_rpc_method_handler( - servicer.SetLastCompactionTimeForTenant, - request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.SetLastCompactionTimeForTenantRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), + 'CreateDatabase': grpc.unary_unary_rpc_method_handler( + servicer.CreateDatabase, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateDatabaseRequest.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateDatabaseResponse.SerializeToString, + ), + 'GetDatabase': grpc.unary_unary_rpc_method_handler( + servicer.GetDatabase, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetDatabaseRequest.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetDatabaseResponse.SerializeToString, + ), + 'CreateTenant': grpc.unary_unary_rpc_method_handler( + servicer.CreateTenant, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateTenantRequest.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateTenantResponse.SerializeToString, + ), + 'GetTenant': grpc.unary_unary_rpc_method_handler( + servicer.GetTenant, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetTenantRequest.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetTenantResponse.SerializeToString, + ), + 'CreateSegment': grpc.unary_unary_rpc_method_handler( + servicer.CreateSegment, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateSegmentRequest.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateSegmentResponse.SerializeToString, + ), + 'DeleteSegment': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSegment, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteSegmentRequest.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteSegmentResponse.SerializeToString, + ), + 'GetSegments': grpc.unary_unary_rpc_method_handler( + servicer.GetSegments, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetSegmentsRequest.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetSegmentsResponse.SerializeToString, + ), + 'UpdateSegment': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSegment, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateSegmentRequest.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateSegmentResponse.SerializeToString, + ), + 'CreateCollection': grpc.unary_unary_rpc_method_handler( + servicer.CreateCollection, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.CreateCollectionRequest.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.CreateCollectionResponse.SerializeToString, + ), + 'DeleteCollection': grpc.unary_unary_rpc_method_handler( + servicer.DeleteCollection, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteCollectionRequest.FromString, + 
response_serializer=chromadb_dot_proto_dot_coordinator__pb2.DeleteCollectionResponse.SerializeToString, + ), + 'GetCollections': grpc.unary_unary_rpc_method_handler( + servicer.GetCollections, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetCollectionsRequest.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetCollectionsResponse.SerializeToString, + ), + 'UpdateCollection': grpc.unary_unary_rpc_method_handler( + servicer.UpdateCollection, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateCollectionRequest.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.UpdateCollectionResponse.SerializeToString, + ), + 'ResetState': grpc.unary_unary_rpc_method_handler( + servicer.ResetState, + request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.ResetStateResponse.SerializeToString, + ), + 'GetLastCompactionTimeForTenant': grpc.unary_unary_rpc_method_handler( + servicer.GetLastCompactionTimeForTenant, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.GetLastCompactionTimeForTenantRequest.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.GetLastCompactionTimeForTenantResponse.SerializeToString, + ), + 'SetLastCompactionTimeForTenant': grpc.unary_unary_rpc_method_handler( + servicer.SetLastCompactionTimeForTenant, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.SetLastCompactionTimeForTenantRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'FlushCollectionCompaction': grpc.unary_unary_rpc_method_handler( + servicer.FlushCollectionCompaction, + request_deserializer=chromadb_dot_proto_dot_coordinator__pb2.FlushCollectionCompactionRequest.FromString, + response_serializer=chromadb_dot_proto_dot_coordinator__pb2.FlushCollectionCompactionResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - "chroma.SysDB", rpc_method_handlers - ) + 'chroma.SysDB', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. 
class SysDB(object): """Missing associated documentation comment in .proto file.""" @staticmethod - def CreateDatabase( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def CreateDatabase(request, target, - "/chroma.SysDB/CreateDatabase", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/CreateDatabase', chromadb_dot_proto_dot_coordinator__pb2.CreateDatabaseRequest.SerializeToString, chromadb_dot_proto_dot_coordinator__pb2.CreateDatabaseResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def GetDatabase( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def GetDatabase(request, target, - "/chroma.SysDB/GetDatabase", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/GetDatabase', chromadb_dot_proto_dot_coordinator__pb2.GetDatabaseRequest.SerializeToString, chromadb_dot_proto_dot_coordinator__pb2.GetDatabaseResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def CreateTenant( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def CreateTenant(request, target, - "/chroma.SysDB/CreateTenant", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/CreateTenant', chromadb_dot_proto_dot_coordinator__pb2.CreateTenantRequest.SerializeToString, chromadb_dot_proto_dot_coordinator__pb2.CreateTenantResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def GetTenant( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def GetTenant(request, target, - "/chroma.SysDB/GetTenant", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return 
grpc.experimental.unary_unary(request, target, '/chroma.SysDB/GetTenant', chromadb_dot_proto_dot_coordinator__pb2.GetTenantRequest.SerializeToString, chromadb_dot_proto_dot_coordinator__pb2.GetTenantResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def CreateSegment( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def CreateSegment(request, target, - "/chroma.SysDB/CreateSegment", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/CreateSegment', chromadb_dot_proto_dot_coordinator__pb2.CreateSegmentRequest.SerializeToString, chromadb_dot_proto_dot_coordinator__pb2.CreateSegmentResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def DeleteSegment( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def DeleteSegment(request, target, - "/chroma.SysDB/DeleteSegment", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/DeleteSegment', chromadb_dot_proto_dot_coordinator__pb2.DeleteSegmentRequest.SerializeToString, chromadb_dot_proto_dot_coordinator__pb2.DeleteSegmentResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def GetSegments( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def GetSegments(request, target, - "/chroma.SysDB/GetSegments", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/GetSegments', chromadb_dot_proto_dot_coordinator__pb2.GetSegmentsRequest.SerializeToString, chromadb_dot_proto_dot_coordinator__pb2.GetSegmentsResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def UpdateSegment( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - 
wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def UpdateSegment(request, target, - "/chroma.SysDB/UpdateSegment", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/UpdateSegment', chromadb_dot_proto_dot_coordinator__pb2.UpdateSegmentRequest.SerializeToString, chromadb_dot_proto_dot_coordinator__pb2.UpdateSegmentResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def CreateCollection( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def CreateCollection(request, target, - "/chroma.SysDB/CreateCollection", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/CreateCollection', chromadb_dot_proto_dot_coordinator__pb2.CreateCollectionRequest.SerializeToString, chromadb_dot_proto_dot_coordinator__pb2.CreateCollectionResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def DeleteCollection( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def DeleteCollection(request, target, - "/chroma.SysDB/DeleteCollection", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/DeleteCollection', chromadb_dot_proto_dot_coordinator__pb2.DeleteCollectionRequest.SerializeToString, chromadb_dot_proto_dot_coordinator__pb2.DeleteCollectionResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def GetCollections( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def GetCollections(request, target, - "/chroma.SysDB/GetCollections", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/GetCollections', chromadb_dot_proto_dot_coordinator__pb2.GetCollectionsRequest.SerializeToString, 
chromadb_dot_proto_dot_coordinator__pb2.GetCollectionsResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def UpdateCollection( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def UpdateCollection(request, target, - "/chroma.SysDB/UpdateCollection", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/UpdateCollection', chromadb_dot_proto_dot_coordinator__pb2.UpdateCollectionRequest.SerializeToString, chromadb_dot_proto_dot_coordinator__pb2.UpdateCollectionResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def ResetState( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def ResetState(request, target, - "/chroma.SysDB/ResetState", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/ResetState', google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, chromadb_dot_proto_dot_coordinator__pb2.ResetStateResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def GetLastCompactionTimeForTenant( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def GetLastCompactionTimeForTenant(request, target, - "/chroma.SysDB/GetLastCompactionTimeForTenant", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/GetLastCompactionTimeForTenant', chromadb_dot_proto_dot_coordinator__pb2.GetLastCompactionTimeForTenantRequest.SerializeToString, chromadb_dot_proto_dot_coordinator__pb2.GetLastCompactionTimeForTenantResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def SetLastCompactionTimeForTenant( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - 
timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def SetLastCompactionTimeForTenant(request, target, - "/chroma.SysDB/SetLastCompactionTimeForTenant", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/SetLastCompactionTimeForTenant', chromadb_dot_proto_dot_coordinator__pb2.SetLastCompactionTimeForTenantRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def FlushCollectionCompaction(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.SysDB/FlushCollectionCompaction', + chromadb_dot_proto_dot_coordinator__pb2.FlushCollectionCompactionRequest.SerializeToString, + chromadb_dot_proto_dot_coordinator__pb2.FlushCollectionCompactionResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/chromadb/proto/logservice_pb2.py b/chromadb/proto/logservice_pb2.py index 5ce9b4c5dcd..f4a7b89cfff 100644 --- a/chromadb/proto/logservice_pb2.py +++ b/chromadb/proto/logservice_pb2.py @@ -6,7 +6,6 @@ from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -15,36 +14,30 @@ from chromadb.proto import chroma_pb2 as chromadb_dot_proto_dot_chroma__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x1f\x63hromadb/proto/logservice.proto\x12\x06\x63hroma\x1a\x1b\x63hromadb/proto/chroma.proto"X\n\x0fPushLogsRequest\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12.\n\x07records\x18\x02 \x03(\x0b\x32\x1d.chroma.SubmitEmbeddingRecord"(\n\x10PushLogsResponse\x12\x14\n\x0crecord_count\x18\x01 \x01(\x05"S\n\x0fPullLogsRequest\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12\x15\n\rstart_from_id\x18\x02 \x01(\x03\x12\x12\n\nbatch_size\x18\x03 \x01(\x05"J\n\tRecordLog\x12\x0e\n\x06log_id\x18\x01 \x01(\x03\x12-\n\x06record\x18\x02 \x01(\x0b\x32\x1d.chroma.SubmitEmbeddingRecord"6\n\x10PullLogsResponse\x12"\n\x07records\x18\x01 \x03(\x0b\x32\x11.chroma.RecordLog"V\n\x0e\x43ollectionInfo\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12\x14\n\x0c\x66irst_log_id\x18\x02 \x01(\x03\x12\x17\n\x0f\x66irst_log_id_ts\x18\x03 \x01(\x03"&\n$GetAllCollectionInfoToCompactRequest"\\\n%GetAllCollectionInfoToCompactResponse\x12\x33\n\x13\x61ll_collection_info\x18\x01 \x03(\x0b\x32\x16.chroma.CollectionInfo2\x8e\x02\n\nLogService\x12?\n\x08PushLogs\x12\x17.chroma.PushLogsRequest\x1a\x18.chroma.PushLogsResponse"\x00\x12?\n\x08PullLogs\x12\x17.chroma.PullLogsRequest\x1a\x18.chroma.PullLogsResponse"\x00\x12~\n\x1dGetAllCollectionInfoToCompact\x12,.chroma.GetAllCollectionInfoToCompactRequest\x1a-.chroma.GetAllCollectionInfoToCompactResponse"\x00\x42\x39Z7github.com/chroma-core/chroma/go/pkg/proto/logservicepbb\x06proto3' -) +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x63hromadb/proto/logservice.proto\x12\x06\x63hroma\x1a\x1b\x63hromadb/proto/chroma.proto\"X\n\x0fPushLogsRequest\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12.\n\x07records\x18\x02 \x03(\x0b\x32\x1d.chroma.SubmitEmbeddingRecord\"(\n\x10PushLogsResponse\x12\x14\n\x0crecord_count\x18\x01 \x01(\x05\"S\n\x0fPullLogsRequest\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12\x15\n\rstart_from_id\x18\x02 \x01(\x03\x12\x12\n\nbatch_size\x18\x03 \x01(\x05\"J\n\tRecordLog\x12\x0e\n\x06log_id\x18\x01 \x01(\x03\x12-\n\x06record\x18\x02 \x01(\x0b\x32\x1d.chroma.SubmitEmbeddingRecord\"6\n\x10PullLogsResponse\x12\"\n\x07records\x18\x01 \x03(\x0b\x32\x11.chroma.RecordLog\"V\n\x0e\x43ollectionInfo\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12\x14\n\x0c\x66irst_log_id\x18\x02 \x01(\x03\x12\x17\n\x0f\x66irst_log_id_ts\x18\x03 \x01(\x03\"&\n$GetAllCollectionInfoToCompactRequest\"\\\n%GetAllCollectionInfoToCompactResponse\x12\x33\n\x13\x61ll_collection_info\x18\x01 \x03(\x0b\x32\x16.chroma.CollectionInfo2\x8e\x02\n\nLogService\x12?\n\x08PushLogs\x12\x17.chroma.PushLogsRequest\x1a\x18.chroma.PushLogsResponse\"\x00\x12?\n\x08PullLogs\x12\x17.chroma.PullLogsRequest\x1a\x18.chroma.PullLogsResponse\"\x00\x12~\n\x1dGetAllCollectionInfoToCompact\x12,.chroma.GetAllCollectionInfoToCompactRequest\x1a-.chroma.GetAllCollectionInfoToCompactResponse\"\x00\x42\x39Z7github.com/chroma-core/chroma/go/pkg/proto/logservicepbb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, "chromadb.proto.logservice_pb2", _globals -) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'chromadb.proto.logservice_pb2', _globals) if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = ( - b"Z7github.com/chroma-core/chroma/go/pkg/proto/logservicepb" - ) - _globals["_PUSHLOGSREQUEST"]._serialized_start = 72 - _globals["_PUSHLOGSREQUEST"]._serialized_end = 160 - _globals["_PUSHLOGSRESPONSE"]._serialized_start = 162 - _globals["_PUSHLOGSRESPONSE"]._serialized_end = 202 - _globals["_PULLLOGSREQUEST"]._serialized_start = 204 - _globals["_PULLLOGSREQUEST"]._serialized_end = 287 - _globals["_RECORDLOG"]._serialized_start = 289 - _globals["_RECORDLOG"]._serialized_end = 363 - _globals["_PULLLOGSRESPONSE"]._serialized_start = 365 - _globals["_PULLLOGSRESPONSE"]._serialized_end = 419 - _globals["_COLLECTIONINFO"]._serialized_start = 421 - _globals["_COLLECTIONINFO"]._serialized_end = 507 - _globals["_GETALLCOLLECTIONINFOTOCOMPACTREQUEST"]._serialized_start = 509 - _globals["_GETALLCOLLECTIONINFOTOCOMPACTREQUEST"]._serialized_end = 547 - _globals["_GETALLCOLLECTIONINFOTOCOMPACTRESPONSE"]._serialized_start = 549 - _globals["_GETALLCOLLECTIONINFOTOCOMPACTRESPONSE"]._serialized_end = 641 - _globals["_LOGSERVICE"]._serialized_start = 644 - _globals["_LOGSERVICE"]._serialized_end = 914 + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'Z7github.com/chroma-core/chroma/go/pkg/proto/logservicepb' + _globals['_PUSHLOGSREQUEST']._serialized_start=72 + _globals['_PUSHLOGSREQUEST']._serialized_end=160 + _globals['_PUSHLOGSRESPONSE']._serialized_start=162 + _globals['_PUSHLOGSRESPONSE']._serialized_end=202 + _globals['_PULLLOGSREQUEST']._serialized_start=204 + _globals['_PULLLOGSREQUEST']._serialized_end=287 + _globals['_RECORDLOG']._serialized_start=289 + _globals['_RECORDLOG']._serialized_end=363 + _globals['_PULLLOGSRESPONSE']._serialized_start=365 + 
_globals['_PULLLOGSRESPONSE']._serialized_end=419 + _globals['_COLLECTIONINFO']._serialized_start=421 + _globals['_COLLECTIONINFO']._serialized_end=507 + _globals['_GETALLCOLLECTIONINFOTOCOMPACTREQUEST']._serialized_start=509 + _globals['_GETALLCOLLECTIONINFOTOCOMPACTREQUEST']._serialized_end=547 + _globals['_GETALLCOLLECTIONINFOTOCOMPACTRESPONSE']._serialized_start=549 + _globals['_GETALLCOLLECTIONINFOTOCOMPACTRESPONSE']._serialized_end=641 + _globals['_LOGSERVICE']._serialized_start=644 + _globals['_LOGSERVICE']._serialized_end=914 # @@protoc_insertion_point(module_scope) diff --git a/chromadb/proto/logservice_pb2.pyi b/chromadb/proto/logservice_pb2.pyi index 62d8d74f3c2..e7e58ebe8a8 100644 --- a/chromadb/proto/logservice_pb2.pyi +++ b/chromadb/proto/logservice_pb2.pyi @@ -2,13 +2,7 @@ from chromadb.proto import chroma_pb2 as _chroma_pb2 from google.protobuf.internal import containers as _containers from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message -from typing import ( - ClassVar as _ClassVar, - Iterable as _Iterable, - Mapping as _Mapping, - Optional as _Optional, - Union as _Union, -) +from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union DESCRIPTOR: _descriptor.FileDescriptor @@ -17,16 +11,8 @@ class PushLogsRequest(_message.Message): COLLECTION_ID_FIELD_NUMBER: _ClassVar[int] RECORDS_FIELD_NUMBER: _ClassVar[int] collection_id: str - records: _containers.RepeatedCompositeFieldContainer[ - _chroma_pb2.SubmitEmbeddingRecord - ] - def __init__( - self, - collection_id: _Optional[str] = ..., - records: _Optional[ - _Iterable[_Union[_chroma_pb2.SubmitEmbeddingRecord, _Mapping]] - ] = ..., - ) -> None: ... + records: _containers.RepeatedCompositeFieldContainer[_chroma_pb2.SubmitEmbeddingRecord] + def __init__(self, collection_id: _Optional[str] = ..., records: _Optional[_Iterable[_Union[_chroma_pb2.SubmitEmbeddingRecord, _Mapping]]] = ...) -> None: ... class PushLogsResponse(_message.Message): __slots__ = ["record_count"] @@ -42,12 +28,7 @@ class PullLogsRequest(_message.Message): collection_id: str start_from_id: int batch_size: int - def __init__( - self, - collection_id: _Optional[str] = ..., - start_from_id: _Optional[int] = ..., - batch_size: _Optional[int] = ..., - ) -> None: ... + def __init__(self, collection_id: _Optional[str] = ..., start_from_id: _Optional[int] = ..., batch_size: _Optional[int] = ...) -> None: ... class RecordLog(_message.Message): __slots__ = ["log_id", "record"] @@ -55,19 +36,13 @@ class RecordLog(_message.Message): RECORD_FIELD_NUMBER: _ClassVar[int] log_id: int record: _chroma_pb2.SubmitEmbeddingRecord - def __init__( - self, - log_id: _Optional[int] = ..., - record: _Optional[_Union[_chroma_pb2.SubmitEmbeddingRecord, _Mapping]] = ..., - ) -> None: ... + def __init__(self, log_id: _Optional[int] = ..., record: _Optional[_Union[_chroma_pb2.SubmitEmbeddingRecord, _Mapping]] = ...) -> None: ... class PullLogsResponse(_message.Message): __slots__ = ["records"] RECORDS_FIELD_NUMBER: _ClassVar[int] records: _containers.RepeatedCompositeFieldContainer[RecordLog] - def __init__( - self, records: _Optional[_Iterable[_Union[RecordLog, _Mapping]]] = ... - ) -> None: ... + def __init__(self, records: _Optional[_Iterable[_Union[RecordLog, _Mapping]]] = ...) -> None: ... 
class CollectionInfo(_message.Message): __slots__ = ["collection_id", "first_log_id", "first_log_id_ts"] @@ -77,12 +52,7 @@ class CollectionInfo(_message.Message): collection_id: str first_log_id: int first_log_id_ts: int - def __init__( - self, - collection_id: _Optional[str] = ..., - first_log_id: _Optional[int] = ..., - first_log_id_ts: _Optional[int] = ..., - ) -> None: ... + def __init__(self, collection_id: _Optional[str] = ..., first_log_id: _Optional[int] = ..., first_log_id_ts: _Optional[int] = ...) -> None: ... class GetAllCollectionInfoToCompactRequest(_message.Message): __slots__ = [] @@ -92,9 +62,4 @@ class GetAllCollectionInfoToCompactResponse(_message.Message): __slots__ = ["all_collection_info"] ALL_COLLECTION_INFO_FIELD_NUMBER: _ClassVar[int] all_collection_info: _containers.RepeatedCompositeFieldContainer[CollectionInfo] - def __init__( - self, - all_collection_info: _Optional[ - _Iterable[_Union[CollectionInfo, _Mapping]] - ] = ..., - ) -> None: ... + def __init__(self, all_collection_info: _Optional[_Iterable[_Union[CollectionInfo, _Mapping]]] = ...) -> None: ... diff --git a/chromadb/proto/logservice_pb2_grpc.py b/chromadb/proto/logservice_pb2_grpc.py index 7e4ab6a7c29..ab20441aa9a 100644 --- a/chromadb/proto/logservice_pb2_grpc.py +++ b/chromadb/proto/logservice_pb2_grpc.py @@ -15,20 +15,20 @@ def __init__(self, channel): channel: A grpc.Channel. """ self.PushLogs = channel.unary_unary( - "/chroma.LogService/PushLogs", - request_serializer=chromadb_dot_proto_dot_logservice__pb2.PushLogsRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_logservice__pb2.PushLogsResponse.FromString, - ) + '/chroma.LogService/PushLogs', + request_serializer=chromadb_dot_proto_dot_logservice__pb2.PushLogsRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_logservice__pb2.PushLogsResponse.FromString, + ) self.PullLogs = channel.unary_unary( - "/chroma.LogService/PullLogs", - request_serializer=chromadb_dot_proto_dot_logservice__pb2.PullLogsRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_logservice__pb2.PullLogsResponse.FromString, - ) + '/chroma.LogService/PullLogs', + request_serializer=chromadb_dot_proto_dot_logservice__pb2.PullLogsRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_logservice__pb2.PullLogsResponse.FromString, + ) self.GetAllCollectionInfoToCompact = channel.unary_unary( - "/chroma.LogService/GetAllCollectionInfoToCompact", - request_serializer=chromadb_dot_proto_dot_logservice__pb2.GetAllCollectionInfoToCompactRequest.SerializeToString, - response_deserializer=chromadb_dot_proto_dot_logservice__pb2.GetAllCollectionInfoToCompactResponse.FromString, - ) + '/chroma.LogService/GetAllCollectionInfoToCompact', + request_serializer=chromadb_dot_proto_dot_logservice__pb2.GetAllCollectionInfoToCompactRequest.SerializeToString, + response_deserializer=chromadb_dot_proto_dot_logservice__pb2.GetAllCollectionInfoToCompactResponse.FromString, + ) class LogServiceServicer(object): @@ -37,133 +37,96 @@ class LogServiceServicer(object): def PushLogs(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def PullLogs(self, request, context): """Missing associated documentation comment in .proto file.""" 
context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def GetAllCollectionInfoToCompact(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') def add_LogServiceServicer_to_server(servicer, server): rpc_method_handlers = { - "PushLogs": grpc.unary_unary_rpc_method_handler( - servicer.PushLogs, - request_deserializer=chromadb_dot_proto_dot_logservice__pb2.PushLogsRequest.FromString, - response_serializer=chromadb_dot_proto_dot_logservice__pb2.PushLogsResponse.SerializeToString, - ), - "PullLogs": grpc.unary_unary_rpc_method_handler( - servicer.PullLogs, - request_deserializer=chromadb_dot_proto_dot_logservice__pb2.PullLogsRequest.FromString, - response_serializer=chromadb_dot_proto_dot_logservice__pb2.PullLogsResponse.SerializeToString, - ), - "GetAllCollectionInfoToCompact": grpc.unary_unary_rpc_method_handler( - servicer.GetAllCollectionInfoToCompact, - request_deserializer=chromadb_dot_proto_dot_logservice__pb2.GetAllCollectionInfoToCompactRequest.FromString, - response_serializer=chromadb_dot_proto_dot_logservice__pb2.GetAllCollectionInfoToCompactResponse.SerializeToString, - ), + 'PushLogs': grpc.unary_unary_rpc_method_handler( + servicer.PushLogs, + request_deserializer=chromadb_dot_proto_dot_logservice__pb2.PushLogsRequest.FromString, + response_serializer=chromadb_dot_proto_dot_logservice__pb2.PushLogsResponse.SerializeToString, + ), + 'PullLogs': grpc.unary_unary_rpc_method_handler( + servicer.PullLogs, + request_deserializer=chromadb_dot_proto_dot_logservice__pb2.PullLogsRequest.FromString, + response_serializer=chromadb_dot_proto_dot_logservice__pb2.PullLogsResponse.SerializeToString, + ), + 'GetAllCollectionInfoToCompact': grpc.unary_unary_rpc_method_handler( + servicer.GetAllCollectionInfoToCompact, + request_deserializer=chromadb_dot_proto_dot_logservice__pb2.GetAllCollectionInfoToCompactRequest.FromString, + response_serializer=chromadb_dot_proto_dot_logservice__pb2.GetAllCollectionInfoToCompactResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - "chroma.LogService", rpc_method_handlers - ) + 'chroma.LogService', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. 
class LogService(object): """Missing associated documentation comment in .proto file.""" @staticmethod - def PushLogs( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def PushLogs(request, target, - "/chroma.LogService/PushLogs", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.LogService/PushLogs', chromadb_dot_proto_dot_logservice__pb2.PushLogsRequest.SerializeToString, chromadb_dot_proto_dot_logservice__pb2.PushLogsResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def PullLogs( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def PullLogs(request, target, - "/chroma.LogService/PullLogs", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.LogService/PullLogs', chromadb_dot_proto_dot_logservice__pb2.PullLogsRequest.SerializeToString, chromadb_dot_proto_dot_logservice__pb2.PullLogsResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def GetAllCollectionInfoToCompact( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, + def GetAllCollectionInfoToCompact(request, target, - "/chroma.LogService/GetAllCollectionInfoToCompact", + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/chroma.LogService/GetAllCollectionInfoToCompact', chromadb_dot_proto_dot_logservice__pb2.GetAllCollectionInfoToCompactRequest.SerializeToString, chromadb_dot_proto_dot_logservice__pb2.GetAllCollectionInfoToCompactResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/go/Makefile b/go/Makefile index 286fd5388a0..8bd6606d50b 100644 --- a/go/Makefile +++ b/go/Makefile @@ -4,7 +4,8 @@ build: go build -v -o bin/logservice ./cmd/logservice/ test: build - go test -p 1 -cover ./... + go test -race -cover ./... 
+ lint: #brew install golangci-lint diff --git a/go/go.sum b/go/go.sum index 7dddbec0ed6..2e0c9378567 100644 --- a/go/go.sum +++ b/go/go.sum @@ -1,9 +1,5 @@ -ariga.io/atlas-go-sdk v0.1.1-0.20231001054405-7edfcfc14f1c h1:jvi4KB/7DmYYT+Wy2TFImccaBU0+dw7V8Un67NDGuio= -ariga.io/atlas-go-sdk v0.1.1-0.20231001054405-7edfcfc14f1c/go.mod h1:MLvZ9QwZx1KhI6+8XguxHPUPm0/PTTUr46S5GQAe9WI= ariga.io/atlas-go-sdk v0.2.3 h1:DpKruiJ9ElJcNhYxnQM9ddzupHXEYFH0Jx6ZcZ7lKYQ= ariga.io/atlas-go-sdk v0.2.3/go.mod h1:owkEEXw6jqne5KPVDfKsYB7cwMiMk3jtOiAAeKxS/yU= -ariga.io/atlas-provider-gorm v0.1.1 h1:Y0VsZCQkXJRYIJxenn2BM6sW2u9SkTca5mLvJumqrgE= -ariga.io/atlas-provider-gorm v0.1.1/go.mod h1:jb8uYcN+ul8Nf7OVzi5Vd2y+SQXrI4dHYBEUCiCi/6Q= ariga.io/atlas-provider-gorm v0.3.1 h1:+RrnoBwlqMj+B1x/Cf1BfwtZzq6v5vKzHdl2A6nZuBU= ariga.io/atlas-provider-gorm v0.3.1/go.mod h1:NOXGkyHfWFm8vQO7T+je5Zj5DdLZhkzReXGfxnnK4VM= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= @@ -16,14 +12,20 @@ github.com/AthenZ/athenz v1.10.39/go.mod h1:3Tg8HLsiQZp81BJY58JBeU2BR6B/H4/0MQGf github.com/Azure/azure-sdk-for-go/sdk/azcore v1.4.0/go.mod h1:ON4tFdPTwRcgWEaVDrN3584Ef+b7GgSJaXxe5fW9t4M= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.1/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.1 h1:/iHxaJhsFr0+xVFfbMr5vxz848jyiWuIEDhYq3y5odY= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.1/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0 h1:vcYCAze6p19qBW7MhZybIsqD8sMV8js0NyQM8JDnVtg= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0/go.mod h1:OQeznEEkTZ9OrhHJoDD8ZDq51FHgXjqtP9z6bEwBq9U= github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2/go.mod h1:eWRD7oawr1Mu1sLCawqVc0CUiF43ia3qQMxLscsKQ9w= github.com/Azure/azure-sdk-for-go/sdk/internal v1.2.0/go.mod h1:eWRD7oawr1Mu1sLCawqVc0CUiF43ia3qQMxLscsKQ9w= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 h1:sXr+ck84g/ZlZUOZiNELInmMgOsuGwdjjVkEIde0OtY= github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.0 h1:yfJe15aSwEQ6Oo6J+gdfdulPNoZ3TEhmbhLIoxZcA+U= github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.0/go.mod h1:Q28U+75mpCaSCDowNEmhIo/rmgdkqmkmzI7N6TGR4UY= +github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v0.8.0 h1:T028gtTPiYt/RMUfs8nVsAL7FDQrfLlrm/NnRG/zcC4= github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v0.8.0/go.mod h1:cw4zVQgBby0Z5f2v0itn6se2dDP17nTjbZFXW5uPyHA= github.com/AzureAD/microsoft-authentication-library-for-go v1.0.0/go.mod h1:kgDmCTgBzIEPFElEF+FK0SdjAor06dRq2Go927dnQ6o= +github.com/AzureAD/microsoft-authentication-library-for-go v1.1.0 h1:HCc0+LpPfpCKs6LGGLAhwBARt9632unrVcI6i8s/8os= github.com/AzureAD/microsoft-authentication-library-for-go v1.1.0/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/DataDog/zstd v1.5.0 h1:+K/VEwIAaPcHiMtQvpLD4lqW7f0Gk3xdYZmI1hD+CXo= @@ -72,6 +74,7 @@ github.com/emicklei/go-restful/v3 v3.9.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry github.com/evanphx/json-patch v4.12.0+incompatible h1:4onqiflcdA9EOZ4RxV643DvftH5pOlLGNtQ5lPWQu84= github.com/evanphx/json-patch v4.12.0+incompatible/go.mod 
h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= @@ -96,6 +99,7 @@ github.com/go-sql-driver/mysql v1.7.1 h1:lUIinVbN1DY0xBg0eMOzmmtGoHwWBbvnWubQUrt github.com/go-sql-driver/mysql v1.7.1/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= +github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0= github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= @@ -106,6 +110,7 @@ github.com/golang-jwt/jwt v3.2.1+incompatible h1:73Z+4BJcrTC+KczS6WvTPvRGOp1WmfE github.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= github.com/golang-jwt/jwt/v4 v4.4.3/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang-jwt/jwt/v5 v5.0.0 h1:1n1XNM9hk7O9mnQoNBGolZvzebBQ7p93ULHRc28XJUE= github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= @@ -142,6 +147,7 @@ github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/ github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1 h1:K6RDEckDVWvDI9JAJYCmNdQXq6neHJOYx3V6jnqNEec= +github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4= github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -196,11 +202,16 @@ github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORN github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod 
h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/linkedin/goavro/v2 v2.9.8 h1:jN50elxBsGBDGVDEKqUlDuU1cFwJ11K/yrJCBMe/7Wg= github.com/linkedin/goavro/v2 v2.9.8/go.mod h1:UgQUb2N/pmueQYH9bfqFioWxzYCZXSfF8Jw03O5sjqA= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= @@ -233,15 +244,20 @@ github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRW github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/ginkgo/v2 v2.9.4 h1:xR7vG4IXt5RWx6FfIjyAtsoMAtnc3C/rFXBBd2AjZwE= +github.com/onsi/ginkgo/v2 v2.9.4/go.mod h1:gCQYp2Q+kSoIj7ykSVb9nskRSsR6PUj4AiLywzIhbKM= github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE= +github.com/onsi/gomega v1.27.6/go.mod h1:PIQNjfQwkP3aQAH7lf7j87O/5FiNr+ZR8+ipb+qQlhg= github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= +github.com/pingcap/errors v0.11.0 h1:DCJQB8jrHbQ1VVlMFIrbj2ApScNNotVmkSNplu2yUt4= github.com/pingcap/errors v0.11.0/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= -github.com/pingcap/errors v0.11.5-0.20210425183316-da1aaba5fb63 h1:+FZIDR/D97YOPik4N4lPDaUcLDF/EQPogxtlHB2ZZRM= github.com/pingcap/log v1.1.0 h1:ELiPxACz7vdo1qAvvaWJg1NrYFoY6gqAh/+Uo6aXdD8= github.com/pingcap/log v1.1.0/go.mod h1:DWQW5jICDR7UJh4HtxXSM20Churx4CQL0fwL/SoOSA4= +github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -250,6 +266,7 @@ github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g= +github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= @@ -327,6 +344,7 @@ go.uber.org/automaxprocs v1.5.3 
h1:kWazyxZUrS3Gs4qUpbwo5kEIMGe/DAvi5Z4tl2NW4j8= go.uber.org/automaxprocs v1.5.3/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnwa1WM0= go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/multierr v1.7.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= @@ -344,8 +362,6 @@ golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58 golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= -golang.org/x/crypto v0.16.0 h1:mMMrFzRSCF0GvB7Ne27XVtVAaXLrPmgPC7/v0tkwHaY= -golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= @@ -353,8 +369,6 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.11.0 h1:bUO06HqtnRcc/7l71XBe4WcqTZ+3AH1J59zWDDwLKgU= -golang.org/x/mod v0.11.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.15.0 h1:SernR4v+D55NyBH2QiEQrlBAnj1ECL6AGrA5+dPaMY8= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -378,8 +392,6 @@ golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= -golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= -golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -427,8 +439,6 @@ golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= -golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4= -golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= golang.org/x/term v0.16.0/go.mod 
h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -455,8 +465,6 @@ golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.10.0 h1:tvDr/iQoUqNdohiYm0LmmKcBk+q86lb9EprIUFhHHGg= -golang.org/x/tools v0.10.0/go.mod h1:UJwyiVBsOA2uwvK/e5OY3GTpDUJriEd+/YlqAwLPmyM= golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc= golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -467,6 +475,7 @@ google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7 google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= google.golang.org/genproto v0.0.0-20231212172506-995d672761c0 h1:YJ5pD9rF8o9Qtta0Cmy9rdBwkSjrTCT6XTiUQVOtIos= +google.golang.org/genproto v0.0.0-20231212172506-995d672761c0/go.mod h1:l/k7rMz0vFTBPy+tFSGvXEd3z+BcoG1k7EHbqm+YBsY= google.golang.org/genproto/googleapis/api v0.0.0-20240102182953-50ed04b92917 h1:rcS6EyEaoCO52hQDupoSfrxI3R6C2Tq741is7X8OvnM= google.golang.org/genproto/googleapis/api v0.0.0-20240102182953-50ed04b92917/go.mod h1:CmlNWB9lSezaYELKS5Ym1r44VrrbPUa7JTvw+6MbpJ0= google.golang.org/genproto/googleapis/rpc v0.0.0-20240102182953-50ed04b92917 h1:6G8oQ016D88m1xAKljMlBOOGWDZkes4kMhgGFlf8WcQ= @@ -497,6 +506,7 @@ gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc= gopkg.in/square/go-jose.v2 v2.4.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= @@ -519,8 +529,6 @@ gorm.io/driver/sqlserver v1.5.2 h1:+o4RQ8w1ohPbADhFqDxeeZnSWjwOcBnxBckjTbcP4wk= gorm.io/driver/sqlserver v1.5.2/go.mod h1:gaKF0MO0cfTq9Q3/XhkowSw4g6nIwHPGAs4hzKCmvBo= gorm.io/gorm v1.25.2-0.20230530020048-26663ab9bf55/go.mod h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k= gorm.io/gorm v1.25.2-0.20230610234218-206613868439/go.mod h1:L4uxeKpfBml98NYqVqwAdmV1a2nBtAec/cf3fpucW/k= -gorm.io/gorm v1.25.5 h1:zR9lOiiYf09VNh5Q1gphfyia1JpiClIWG9hQaxB/mls= -gorm.io/gorm v1.25.5/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= gorm.io/gorm v1.25.7 h1:VsD6acwRjz2zFxGO50gPO6AkNs7KKnvfzUjHQhZDz/A= gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= k8s.io/api v0.28.3 h1:Gj1HtbSdB4P08C8rs9AR94MfSGpRhJgsS+GF9V26xMM= diff --git a/go/migrations/20240309223050.sql b/go/migrations/20240313233558.sql similarity index 97% rename from go/migrations/20240309223050.sql rename to go/migrations/20240313233558.sql index 91cca57c953..e8d72ab372a 
100644 --- a/go/migrations/20240309223050.sql +++ b/go/migrations/20240313233558.sql @@ -22,6 +22,7 @@ CREATE TABLE "public"."collections" ( "created_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "updated_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "log_position" bigint NULL DEFAULT 0, + "version" integer NULL DEFAULT 0, PRIMARY KEY ("id") ); -- Create index "uni_collections_name" to table: "collections" @@ -78,6 +79,7 @@ CREATE TABLE "public"."segments" ( "is_deleted" boolean NULL DEFAULT false, "created_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "updated_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + "file_paths" text NULL DEFAULT '{}', PRIMARY KEY ("collection_id", "id") ); -- Create "tenants" table diff --git a/go/migrations/atlas.sum b/go/migrations/atlas.sum index 828fcfc446d..df6b20e0eee 100644 --- a/go/migrations/atlas.sum +++ b/go/migrations/atlas.sum @@ -1,2 +1,2 @@ -h1:w35hwPquwsvenxzG956rH1l7vvSoB2S6XNTGOz2C78w= -20240309223050.sql h1:N3DifBqpCQpbRHqCtOc9sr+Qaq7mZek5Zz59KoFAy8g= +h1:Q+UeSEuBZon9dDhW0jtrv4UYIX0CkFd+WYE9xRH7hoM= +20240313233558.sql h1:WqdAFn0qL9z9fAItKv7kQuENhDpawT0FRsIrWDhLoJ0= diff --git a/go/pkg/common/errors.go b/go/pkg/common/errors.go index a5a3119bd1f..209ea7a21af 100644 --- a/go/pkg/common/errors.go +++ b/go/pkg/common/errors.go @@ -20,6 +20,9 @@ var ( ErrCollectionTopicEmpty = errors.New("collection topic is empty") ErrCollectionUniqueConstraintViolation = errors.New("collection unique constraint violation") ErrCollectionDeleteNonExistingCollection = errors.New("delete non existing collection") + ErrCollectionLogPositionStale = errors.New("collection log position Stale") + ErrCollectionVersionStale = errors.New("collection version stale") + ErrCollectionVersionInvalid = errors.New("collection version invalid") // Collection metadata errors ErrUnknownCollectionMetadataType = errors.New("collection metadata value type not supported") @@ -35,7 +38,4 @@ var ( // Segment metadata errors ErrUnknownSegmentMetadataType = errors.New("segment metadata value type not supported") - - // Record Log errors - ErrPushLogs = errors.New("error pushing logs") ) diff --git a/go/pkg/coordinator/apis.go b/go/pkg/coordinator/apis.go index c1e5e9f2231..13f75943c78 100644 --- a/go/pkg/coordinator/apis.go +++ b/go/pkg/coordinator/apis.go @@ -30,6 +30,7 @@ type ICoordinator interface { GetTenant(ctx context.Context, getTenant *model.GetTenant) (*model.Tenant, error) SetTenantLastCompactionTime(ctx context.Context, tenantID string, lastCompactionTime int64) error GetTenantsLastCompactionTime(ctx context.Context, tenantIDs []string) ([]*dbmodel.Tenant, error) + FlushCollectionCompaction(ctx context.Context, flushCollectionCompaction *model.FlushCollectionCompaction) (*model.FlushCollectionInfo, error) } func (s *Coordinator) ResetState(ctx context.Context) error { @@ -69,12 +70,12 @@ func (s *Coordinator) GetTenant(ctx context.Context, getTenant *model.GetTenant) } func (s *Coordinator) CreateCollection(ctx context.Context, createCollection *model.CreateCollection) (*model.Collection, error) { + log.Info("create collection", zap.Any("createCollection", createCollection)) collectionTopic, err := s.assignCollection(createCollection.ID) if err != nil { return nil, err } createCollection.Topic = collectionTopic - log.Info("apis create collection", zap.Any("collection", createCollection)) collection, err := s.catalog.CreateCollection(ctx, createCollection, createCollection.Ts) if err != nil { return nil, err @@ -167,3 +168,7 @@ func (s *Coordinator) 
SetTenantLastCompactionTime(ctx context.Context, tenantID func (s *Coordinator) GetTenantsLastCompactionTime(ctx context.Context, tenantIDs []string) ([]*dbmodel.Tenant, error) { return s.catalog.GetTenantsLastCompactionTime(ctx, tenantIDs) } + +func (s *Coordinator) FlushCollectionCompaction(ctx context.Context, flushCollectionCompaction *model.FlushCollectionCompaction) (*model.FlushCollectionInfo, error) { + return s.catalog.FlushCollectionCompaction(ctx, flushCollectionCompaction) +} diff --git a/go/pkg/coordinator/apis_test.go b/go/pkg/coordinator/apis_test.go index 24aee2c4a5a..47a8b9b3218 100644 --- a/go/pkg/coordinator/apis_test.go +++ b/go/pkg/coordinator/apis_test.go @@ -2,10 +2,12 @@ package coordinator import ( "context" + "github.com/chroma-core/chroma/go/pkg/metastore/db/dao" "github.com/pingcap/log" "github.com/stretchr/testify/suite" "gorm.io/gorm" "sort" + "strconv" "testing" "github.com/chroma-core/chroma/go/pkg/common" @@ -13,17 +15,20 @@ import ( "github.com/chroma-core/chroma/go/pkg/model" "github.com/chroma-core/chroma/go/pkg/types" "github.com/google/uuid" - "github.com/stretchr/testify/assert" "pgregory.net/rapid" ) type APIsTestSuite struct { suite.Suite - db *gorm.DB - t *testing.T - collectionId1 types.UniqueID - collectionId2 types.UniqueID - records [][]byte + db *gorm.DB + collectionId1 types.UniqueID + collectionId2 types.UniqueID + records [][]byte + tenantName string + databaseName string + databaseId string + sampleCollections []*model.Collection + coordinator *Coordinator } func (suite *APIsTestSuite) SetupSuite() { @@ -33,12 +38,43 @@ func (suite *APIsTestSuite) SetupSuite() { func (suite *APIsTestSuite) SetupTest() { log.Info("setup test") - dbcore.ResetTestTables(suite.db) + suite.tenantName = "tenant_" + suite.T().Name() + suite.databaseName = "database_" + suite.T().Name() + DbId, err := dao.CreateTestTenantAndDatabase(suite.db, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.databaseId = DbId + suite.sampleCollections = SampleCollections(suite.tenantName, suite.databaseName) + for index, collection := range suite.sampleCollections { + collection.ID = types.NewUniqueID() + collection.Name = "collection_" + suite.T().Name() + strconv.Itoa(index) + } + assignmentPolicy := NewMockAssignmentPolicy(suite.sampleCollections) + ctx := context.Background() + c, err := NewCoordinator(ctx, assignmentPolicy, suite.db, nil, nil) + if err != nil { + suite.T().Fatalf("error creating coordinator: %v", err) + } + suite.coordinator = c + for _, collection := range suite.sampleCollections { + _, errCollectionCreation := c.CreateCollection(ctx, &model.CreateCollection{ + ID: collection.ID, + Name: collection.Name, + Topic: collection.Topic, + Metadata: collection.Metadata, + Dimension: collection.Dimension, + TenantID: collection.TenantID, + DatabaseName: collection.DatabaseName, + }) + suite.NoError(errCollectionCreation) + } } func (suite *APIsTestSuite) TearDownTest() { log.Info("teardown test") - dbcore.ResetTestTables(suite.db) + err := dao.CleanUpTestDatabase(suite.db, suite.tenantName, suite.databaseName) + suite.NoError(err) + err = dao.CleanUpTestTenant(suite.db, suite.tenantName) + suite.NoError(err) } // TODO: This is not complete yet. We need to add more tests for the other APIs. 
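A minimal caller-side sketch of the Coordinator.FlushCollectionCompaction entry point added above (not part of the patch itself). The types, fields and sentinel errors are the ones introduced in this diff; the package name, helper name, retry decision and import paths are assumptions inferred from the file layout, not code from the repository.

package compactor // hypothetical package for illustration only

import (
    "context"
    "errors"

    "github.com/chroma-core/chroma/go/pkg/common"
    "github.com/chroma-core/chroma/go/pkg/coordinator"
    "github.com/chroma-core/chroma/go/pkg/model"
    "github.com/chroma-core/chroma/go/pkg/types"
)

// flushAfterCompaction is an illustrative caller of Coordinator.FlushCollectionCompaction.
// It returns true when the flush is rejected because the collection moved on underneath the
// caller (stale version or stale log position), in which case the caller would refresh the
// collection state and retry rather than treat it as a hard failure.
func flushAfterCompaction(ctx context.Context, c coordinator.ICoordinator,
    collectionID, segmentID types.UniqueID, tenantID string,
    logPosition int64, currentVersion int32, filePaths map[string][]string) (bool, error) {

    info, err := c.FlushCollectionCompaction(ctx, &model.FlushCollectionCompaction{
        ID:                       collectionID,
        TenantID:                 tenantID,
        LogPosition:              logPosition,    // must advance past the stored log position
        CurrentCollectionVersion: currentVersion, // must equal the collection's current version
        FlushSegmentCompactions: []*model.FlushSegmentCompaction{
            {ID: segmentID, FilePaths: filePaths},
        },
    })
    if errors.Is(err, common.ErrCollectionVersionStale) || errors.Is(err, common.ErrCollectionLogPositionStale) {
        return true, nil
    }
    if err != nil {
        return false, err
    }
    _ = info.CollectionVersion        // bumped by the coordinator on success
    _ = info.TenantLastCompactionTime // tenant's last compaction time, unix seconds
    return false, nil
}

The CurrentCollectionVersion/LogPosition pair acts as an optimistic-concurrency guard: a flush whose version no longer matches, or whose log position does not advance, is rejected with the new sentinel errors instead of overwriting newer state, which is what the FlushCollectionCompaction tests later in this diff assert.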
@@ -180,7 +216,7 @@ func TestAPIs(t *testing.T) { // rapid.Check(t, testSegment) } -func SampleCollections(t *testing.T, tenantID string, databaseName string) []*model.Collection { +func SampleCollections(tenantID string, databaseName string) []*model.Collection { dimension := int32(128) metadata1 := model.NewCollectionMetadata[model.CollectionMetadataValueType]() metadata1.Add("test_str", &model.CollectionMetadataValueStringType{Value: "str1"}) @@ -248,446 +284,411 @@ func (m *MockAssignmentPolicy) AssignCollection(collectionID types.UniqueID) (st } func (suite *APIsTestSuite) TestCreateGetDeleteCollections() { - - sampleCollections := SampleCollections(suite.t, common.DefaultTenant, common.DefaultDatabase) - ctx := context.Background() - assignmentPolicy := NewMockAssignmentPolicy(sampleCollections) - c, err := NewCoordinator(ctx, assignmentPolicy, suite.db, nil, nil) - if err != nil { - suite.t.Fatalf("error creating coordinator: %v", err) - } - c.ResetState(ctx) - - for _, collection := range sampleCollections { - c.CreateCollection(ctx, &model.CreateCollection{ - ID: collection.ID, - Name: collection.Name, - Topic: collection.Topic, - Metadata: collection.Metadata, - Dimension: collection.Dimension, - TenantID: collection.TenantID, - DatabaseName: collection.DatabaseName, - }) - } - - results, err := c.GetCollections(ctx, types.NilUniqueID(), nil, nil, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) + results, err := suite.coordinator.GetCollections(ctx, types.NilUniqueID(), nil, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) sort.Slice(results, func(i, j int) bool { return results[i].Name < results[j].Name }) - - assert.Equal(suite.t, sampleCollections, results) + suite.Equal(suite.sampleCollections, results) // Duplicate create fails - _, err = c.CreateCollection(ctx, &model.CreateCollection{ - ID: sampleCollections[0].ID, - Name: sampleCollections[0].Name, - TenantID: common.DefaultTenant, - DatabaseName: common.DefaultDatabase, + _, err = suite.coordinator.CreateCollection(ctx, &model.CreateCollection{ + ID: suite.sampleCollections[0].ID, + Name: suite.sampleCollections[0].Name, + TenantID: suite.tenantName, + DatabaseName: suite.databaseName, }) - assert.Error(suite.t, err) + suite.Error(err) // Find by name - for _, collection := range sampleCollections { - result, err := c.GetCollections(ctx, types.NilUniqueID(), &collection.Name, nil, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Collection{collection}, result) + for _, collection := range suite.sampleCollections { + result, err := suite.coordinator.GetCollections(ctx, types.NilUniqueID(), &collection.Name, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Equal([]*model.Collection{collection}, result) } // Find by topic - for _, collection := range sampleCollections { - result, err := c.GetCollections(ctx, types.NilUniqueID(), nil, &collection.Topic, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Collection{collection}, result) + for _, collection := range suite.sampleCollections { + result, err := suite.coordinator.GetCollections(ctx, types.NilUniqueID(), nil, &collection.Topic, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Equal([]*model.Collection{collection}, result) } // Find by id - for _, collection := range sampleCollections { - result, err := c.GetCollections(ctx, collection.ID, nil, nil, 
common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Collection{collection}, result) + for _, collection := range suite.sampleCollections { + result, err := suite.coordinator.GetCollections(ctx, collection.ID, nil, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Equal([]*model.Collection{collection}, result) } // Find by id and topic (positive case) - for _, collection := range sampleCollections { - result, err := c.GetCollections(ctx, collection.ID, nil, &collection.Topic, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Collection{collection}, result) + for _, collection := range suite.sampleCollections { + result, err := suite.coordinator.GetCollections(ctx, collection.ID, nil, &collection.Topic, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Equal([]*model.Collection{collection}, result) } // find by id and topic (negative case) - for _, collection := range sampleCollections { + for _, collection := range suite.sampleCollections { otherTopic := "other topic" - result, err := c.GetCollections(ctx, collection.ID, nil, &otherTopic, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Empty(suite.t, result) + result, err := suite.coordinator.GetCollections(ctx, collection.ID, nil, &otherTopic, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Empty(result) } // Delete - c1 := sampleCollections[0] + c1 := suite.sampleCollections[0] deleteCollection := &model.DeleteCollection{ ID: c1.ID, - DatabaseName: common.DefaultDatabase, - TenantID: common.DefaultTenant, + DatabaseName: suite.databaseName, + TenantID: suite.tenantName, } - err = c.DeleteCollection(ctx, deleteCollection) - assert.NoError(suite.t, err) + err = suite.coordinator.DeleteCollection(ctx, deleteCollection) + suite.NoError(err) - results, err = c.GetCollections(ctx, types.NilUniqueID(), nil, nil, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) + results, err = suite.coordinator.GetCollections(ctx, types.NilUniqueID(), nil, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) - assert.NotContains(suite.t, results, c1) - assert.Len(suite.t, results, len(sampleCollections)-1) - assert.ElementsMatch(suite.t, results, sampleCollections[1:]) - byIDResult, err := c.GetCollections(ctx, c1.ID, nil, nil, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Empty(suite.t, byIDResult) + suite.NotContains(results, c1) + suite.Len(results, len(suite.sampleCollections)-1) + suite.ElementsMatch(results, suite.sampleCollections[1:]) + byIDResult, err := suite.coordinator.GetCollections(ctx, c1.ID, nil, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Empty(byIDResult) // Duplicate delete throws an exception - err = c.DeleteCollection(ctx, deleteCollection) - assert.Error(suite.t, err) + err = suite.coordinator.DeleteCollection(ctx, deleteCollection) + suite.Error(err) } func (suite *APIsTestSuite) TestUpdateCollections() { - sampleCollections := SampleCollections(suite.t, common.DefaultTenant, common.DefaultDatabase) - ctx := context.Background() - assignmentPolicy := NewMockAssignmentPolicy(sampleCollections) - c, err := NewCoordinator(ctx, assignmentPolicy, suite.db, nil, nil) - if err != nil { - suite.t.Fatalf("error creating coordinator: %v", err) - } - c.ResetState(ctx) - coll := &model.Collection{ - Name: sampleCollections[0].Name, - 
ID: sampleCollections[0].ID, - Topic: sampleCollections[0].Topic, - Metadata: sampleCollections[0].Metadata, - Dimension: sampleCollections[0].Dimension, - TenantID: sampleCollections[0].TenantID, - DatabaseName: sampleCollections[0].DatabaseName, + Name: suite.sampleCollections[0].Name, + ID: suite.sampleCollections[0].ID, + Topic: suite.sampleCollections[0].Topic, + Metadata: suite.sampleCollections[0].Metadata, + Dimension: suite.sampleCollections[0].Dimension, + TenantID: suite.sampleCollections[0].TenantID, + DatabaseName: suite.sampleCollections[0].DatabaseName, } - c.CreateCollection(ctx, &model.CreateCollection{ - ID: coll.ID, - Name: coll.Name, - Topic: coll.Topic, - Metadata: coll.Metadata, - Dimension: coll.Dimension, - TenantID: coll.TenantID, - DatabaseName: coll.DatabaseName, - }) - // Update name coll.Name = "new_name" - result, err := c.UpdateCollection(ctx, &model.UpdateCollection{ID: coll.ID, Name: &coll.Name}) - assert.NoError(suite.t, err) - assert.Equal(suite.t, coll, result) - resultList, err := c.GetCollections(ctx, types.NilUniqueID(), &coll.Name, nil, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Collection{coll}, resultList) + result, err := suite.coordinator.UpdateCollection(ctx, &model.UpdateCollection{ID: coll.ID, Name: &coll.Name}) + suite.NoError(err) + suite.Equal(coll, result) + resultList, err := suite.coordinator.GetCollections(ctx, types.NilUniqueID(), &coll.Name, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Equal([]*model.Collection{coll}, resultList) // Update topic coll.Topic = "new_topic" - result, err = c.UpdateCollection(ctx, &model.UpdateCollection{ID: coll.ID, Topic: &coll.Topic}) - assert.NoError(suite.t, err) - assert.Equal(suite.t, coll, result) - resultList, err = c.GetCollections(ctx, types.NilUniqueID(), nil, &coll.Topic, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Collection{coll}, resultList) + result, err = suite.coordinator.UpdateCollection(ctx, &model.UpdateCollection{ID: coll.ID, Topic: &coll.Topic}) + suite.NoError(err) + suite.Equal(coll, result) + resultList, err = suite.coordinator.GetCollections(ctx, types.NilUniqueID(), nil, &coll.Topic, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Equal([]*model.Collection{coll}, resultList) // Update dimension newDimension := int32(128) coll.Dimension = &newDimension - result, err = c.UpdateCollection(ctx, &model.UpdateCollection{ID: coll.ID, Dimension: coll.Dimension}) - assert.NoError(suite.t, err) - assert.Equal(suite.t, coll, result) - resultList, err = c.GetCollections(ctx, coll.ID, nil, nil, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Collection{coll}, resultList) + result, err = suite.coordinator.UpdateCollection(ctx, &model.UpdateCollection{ID: coll.ID, Dimension: coll.Dimension}) + suite.NoError(err) + suite.Equal(coll, result) + resultList, err = suite.coordinator.GetCollections(ctx, coll.ID, nil, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Equal([]*model.Collection{coll}, resultList) // Reset the metadata newMetadata := model.NewCollectionMetadata[model.CollectionMetadataValueType]() newMetadata.Add("test_str2", &model.CollectionMetadataValueStringType{Value: "str2"}) coll.Metadata = newMetadata - result, err = c.UpdateCollection(ctx, &model.UpdateCollection{ID: coll.ID, Metadata: coll.Metadata}) - 
assert.NoError(suite.t, err) - assert.Equal(suite.t, coll, result) - resultList, err = c.GetCollections(ctx, coll.ID, nil, nil, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Collection{coll}, resultList) + result, err = suite.coordinator.UpdateCollection(ctx, &model.UpdateCollection{ID: coll.ID, Metadata: coll.Metadata}) + suite.NoError(err) + suite.Equal(coll, result) + resultList, err = suite.coordinator.GetCollections(ctx, coll.ID, nil, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Equal([]*model.Collection{coll}, resultList) // Delete all metadata keys coll.Metadata = nil - result, err = c.UpdateCollection(ctx, &model.UpdateCollection{ID: coll.ID, Metadata: coll.Metadata, ResetMetadata: true}) - assert.NoError(suite.t, err) - assert.Equal(suite.t, coll, result) - resultList, err = c.GetCollections(ctx, coll.ID, nil, nil, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Collection{coll}, resultList) + result, err = suite.coordinator.UpdateCollection(ctx, &model.UpdateCollection{ID: coll.ID, Metadata: coll.Metadata, ResetMetadata: true}) + suite.NoError(err) + suite.Equal(coll, result) + resultList, err = suite.coordinator.GetCollections(ctx, coll.ID, nil, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Equal([]*model.Collection{coll}, resultList) } func (suite *APIsTestSuite) TestCreateUpdateWithDatabase() { - sampleCollections := SampleCollections(suite.t, common.DefaultTenant, common.DefaultDatabase) ctx := context.Background() - assignmentPolicy := NewMockAssignmentPolicy(sampleCollections) - c, err := NewCoordinator(ctx, assignmentPolicy, suite.db, nil, nil) - if err != nil { - suite.t.Fatalf("error creating coordinator: %v", err) - } - c.ResetState(ctx) - _, err = c.CreateDatabase(ctx, &model.CreateDatabase{ - ID: types.MustParse("00000000-d7d7-413b-92e1-731098a6e492").String(), - Name: "new_database", - Tenant: common.DefaultTenant, - }) - assert.NoError(suite.t, err) - - c.CreateCollection(ctx, &model.CreateCollection{ - ID: sampleCollections[0].ID, - Name: sampleCollections[0].Name, - Topic: sampleCollections[0].Topic, - Metadata: sampleCollections[0].Metadata, - Dimension: sampleCollections[0].Dimension, - TenantID: sampleCollections[0].TenantID, - DatabaseName: "new_database", + newDatabaseName := "test_apis_CreateUpdateWithDatabase" + newDatabaseId := uuid.New().String() + _, err := suite.coordinator.CreateDatabase(ctx, &model.CreateDatabase{ + ID: newDatabaseId, + Name: newDatabaseName, + Tenant: suite.tenantName, }) - - c.CreateCollection(ctx, &model.CreateCollection{ - ID: sampleCollections[1].ID, - Name: sampleCollections[1].Name, - Topic: sampleCollections[1].Topic, - Metadata: sampleCollections[1].Metadata, - Dimension: sampleCollections[1].Dimension, - TenantID: sampleCollections[1].TenantID, - DatabaseName: sampleCollections[1].DatabaseName, + suite.NoError(err) + + suite.sampleCollections[0].ID = types.NewUniqueID() + suite.sampleCollections[0].Name = suite.sampleCollections[0].Name + "1" + _, err = suite.coordinator.CreateCollection(ctx, &model.CreateCollection{ + ID: suite.sampleCollections[0].ID, + Name: suite.sampleCollections[0].Name, + Topic: suite.sampleCollections[0].Topic, + Metadata: suite.sampleCollections[0].Metadata, + Dimension: suite.sampleCollections[0].Dimension, + TenantID: suite.sampleCollections[0].TenantID, + DatabaseName: newDatabaseName, }) - + suite.NoError(err) newName1 
:= "new_name_1" - c.UpdateCollection(ctx, &model.UpdateCollection{ - ID: sampleCollections[1].ID, + _, err = suite.coordinator.UpdateCollection(ctx, &model.UpdateCollection{ + ID: suite.sampleCollections[1].ID, Name: &newName1, }) - - result, err := c.GetCollections(ctx, sampleCollections[1].ID, nil, nil, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Equal(suite.t, 1, len(result)) - assert.Equal(suite.t, "new_name_1", result[0].Name) + suite.NoError(err) + result, err := suite.coordinator.GetCollections(ctx, suite.sampleCollections[1].ID, nil, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Len(result, 1) + suite.Equal(newName1, result[0].Name) newName0 := "new_name_0" - c.UpdateCollection(ctx, &model.UpdateCollection{ - ID: sampleCollections[0].ID, + _, err = suite.coordinator.UpdateCollection(ctx, &model.UpdateCollection{ + ID: suite.sampleCollections[0].ID, Name: &newName0, }) - result, err = c.GetCollections(ctx, sampleCollections[0].ID, nil, nil, common.DefaultTenant, "new_database") - assert.NoError(suite.t, err) - assert.Equal(suite.t, 1, len(result)) - assert.Equal(suite.t, "new_name_0", result[0].Name) + suite.NoError(err) + //suite.Equal(newName0, collection.Name) + result, err = suite.coordinator.GetCollections(ctx, suite.sampleCollections[0].ID, nil, nil, suite.tenantName, newDatabaseName) + suite.NoError(err) + suite.Len(result, 1) + suite.Equal(newName0, result[0].Name) + + // clean up + err = dao.CleanUpTestDatabase(suite.db, suite.tenantName, newDatabaseName) + suite.NoError(err) } func (suite *APIsTestSuite) TestGetMultipleWithDatabase() { - sampleCollections := SampleCollections(suite.t, common.DefaultTenant, "new_database") + newDatabaseName := "test_apis_GetMultipleWithDatabase" ctx := context.Background() - assignmentPolicy := NewMockAssignmentPolicy(sampleCollections) - c, err := NewCoordinator(ctx, assignmentPolicy, suite.db, nil, nil) - if err != nil { - suite.t.Fatalf("error creating coordinator: %v", err) - } - c.ResetState(ctx) - _, err = c.CreateDatabase(ctx, &model.CreateDatabase{ - ID: types.MustParse("00000000-d7d7-413b-92e1-731098a6e492").String(), - Name: "new_database", - Tenant: common.DefaultTenant, - }) - assert.NoError(suite.t, err) - for _, collection := range sampleCollections { - c.CreateCollection(ctx, &model.CreateCollection{ + newDatabaseId := uuid.New().String() + _, err := suite.coordinator.CreateDatabase(ctx, &model.CreateDatabase{ + ID: newDatabaseId, + Name: newDatabaseName, + Tenant: suite.tenantName, + }) + suite.NoError(err) + + for index, collection := range suite.sampleCollections { + collection.ID = types.NewUniqueID() + collection.Name = collection.Name + "1" + collection.TenantID = suite.tenantName + collection.DatabaseName = newDatabaseName + _, err := suite.coordinator.CreateCollection(ctx, &model.CreateCollection{ ID: collection.ID, Name: collection.Name, Topic: collection.Topic, Metadata: collection.Metadata, Dimension: collection.Dimension, - TenantID: common.DefaultTenant, - DatabaseName: "new_database", + TenantID: collection.TenantID, + DatabaseName: collection.DatabaseName, }) + suite.NoError(err) + suite.sampleCollections[index] = collection } - result, err := c.GetCollections(ctx, types.NilUniqueID(), nil, nil, common.DefaultTenant, "new_database") - assert.NoError(suite.t, err) - assert.Equal(suite.t, len(sampleCollections), len(result)) + result, err := suite.coordinator.GetCollections(ctx, types.NilUniqueID(), nil, nil, suite.tenantName, newDatabaseName) + 
suite.NoError(err) + suite.Equal(len(suite.sampleCollections), len(result)) sort.Slice(result, func(i, j int) bool { return result[i].Name < result[j].Name }) - assert.Equal(suite.t, sampleCollections, result) + suite.Equal(suite.sampleCollections, result) - result, err = c.GetCollections(ctx, types.NilUniqueID(), nil, nil, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Equal(suite.t, 0, len(result)) + result, err = suite.coordinator.GetCollections(ctx, types.NilUniqueID(), nil, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Equal(len(suite.sampleCollections), len(result)) + + // clean up + err = dao.CleanUpTestDatabase(suite.db, suite.tenantName, newDatabaseName) + suite.NoError(err) } func (suite *APIsTestSuite) TestCreateDatabaseWithTenants() { - sampleCollections := SampleCollections(suite.t, common.DefaultTenant, common.DefaultDatabase) ctx := context.Background() - assignmentPolicy := NewMockAssignmentPolicy(sampleCollections) - c, err := NewCoordinator(ctx, assignmentPolicy, suite.db, nil, nil) - if err != nil { - suite.t.Fatalf("error creating coordinator: %v", err) - } - c.ResetState(ctx) // Create a new tenant - _, err = c.CreateTenant(ctx, &model.CreateTenant{ - Name: "tenant1", + newTenantName := "tenant1" + _, err := suite.coordinator.CreateTenant(ctx, &model.CreateTenant{ + Name: newTenantName, }) - assert.NoError(suite.t, err) + suite.NoError(err) // Create tenant that already exits and expect an error - _, err = c.CreateTenant(ctx, &model.CreateTenant{ - Name: "tenant1", + _, err = suite.coordinator.CreateTenant(ctx, &model.CreateTenant{ + Name: newTenantName, }) - assert.Error(suite.t, err) + suite.Error(err) // Create tenant that already exits and expect an error - _, err = c.CreateTenant(ctx, &model.CreateTenant{ - Name: common.DefaultTenant, + _, err = suite.coordinator.CreateTenant(ctx, &model.CreateTenant{ + Name: suite.tenantName, }) - assert.Error(suite.t, err) + suite.Error(err) // Create a new database within this tenant and also in the default tenant - _, err = c.CreateDatabase(ctx, &model.CreateDatabase{ + newDatabaseName := "test_apis_CreateDatabaseWithTenants" + _, err = suite.coordinator.CreateDatabase(ctx, &model.CreateDatabase{ ID: types.MustParse("33333333-d7d7-413b-92e1-731098a6e492").String(), - Name: "new_database", - Tenant: "tenant1", + Name: newDatabaseName, + Tenant: newTenantName, }) - assert.NoError(suite.t, err) + suite.NoError(err) - _, err = c.CreateDatabase(ctx, &model.CreateDatabase{ + _, err = suite.coordinator.CreateDatabase(ctx, &model.CreateDatabase{ ID: types.MustParse("44444444-d7d7-413b-92e1-731098a6e492").String(), - Name: "new_database", - Tenant: common.DefaultTenant, + Name: newDatabaseName, + Tenant: suite.tenantName, }) - assert.NoError(suite.t, err) + suite.NoError(err) // Create a new collection in the new tenant - _, err = c.CreateCollection(ctx, &model.CreateCollection{ - ID: sampleCollections[0].ID, - Name: sampleCollections[0].Name, - Topic: sampleCollections[0].Topic, - Metadata: sampleCollections[0].Metadata, - Dimension: sampleCollections[0].Dimension, - TenantID: "tenant1", - DatabaseName: "new_database", + suite.sampleCollections[0].ID = types.NewUniqueID() + suite.sampleCollections[0].Name = suite.sampleCollections[0].Name + "1" + _, err = suite.coordinator.CreateCollection(ctx, &model.CreateCollection{ + ID: suite.sampleCollections[0].ID, + Name: suite.sampleCollections[0].Name, + Topic: suite.sampleCollections[0].Topic, + Metadata: 
suite.sampleCollections[0].Metadata, + Dimension: suite.sampleCollections[0].Dimension, + TenantID: newTenantName, + DatabaseName: newDatabaseName, }) - assert.NoError(suite.t, err) + suite.NoError(err) // Create a new collection in the default tenant - c.CreateCollection(ctx, &model.CreateCollection{ - ID: sampleCollections[1].ID, - Name: sampleCollections[1].Name, - Topic: sampleCollections[1].Topic, - Metadata: sampleCollections[1].Metadata, - Dimension: sampleCollections[1].Dimension, - TenantID: common.DefaultTenant, - DatabaseName: "new_database", + suite.sampleCollections[1].ID = types.NewUniqueID() + suite.sampleCollections[1].Name = suite.sampleCollections[1].Name + "2" + _, err = suite.coordinator.CreateCollection(ctx, &model.CreateCollection{ + ID: suite.sampleCollections[1].ID, + Name: suite.sampleCollections[1].Name, + Topic: suite.sampleCollections[1].Topic, + Metadata: suite.sampleCollections[1].Metadata, + Dimension: suite.sampleCollections[1].Dimension, + TenantID: suite.tenantName, + DatabaseName: newDatabaseName, }) + suite.NoError(err) // Check that both tenants have the correct collections - expected := []*model.Collection{sampleCollections[0]} - expected[0].TenantID = "tenant1" - expected[0].DatabaseName = "new_database" - result, err := c.GetCollections(ctx, types.NilUniqueID(), nil, nil, "tenant1", "new_database") - assert.NoError(suite.t, err) - assert.Equal(suite.t, 1, len(result)) - assert.Equal(suite.t, expected[0], result[0]) - - expected = []*model.Collection{sampleCollections[1]} - expected[0].TenantID = common.DefaultTenant - expected[0].DatabaseName = "new_database" - result, err = c.GetCollections(ctx, types.NilUniqueID(), nil, nil, common.DefaultTenant, "new_database") - assert.NoError(suite.t, err) - assert.Equal(suite.t, 1, len(result)) - assert.Equal(suite.t, expected[0], result[0]) + expected := []*model.Collection{suite.sampleCollections[0]} + expected[0].TenantID = newTenantName + expected[0].DatabaseName = newDatabaseName + result, err := suite.coordinator.GetCollections(ctx, types.NilUniqueID(), nil, nil, newTenantName, newDatabaseName) + suite.NoError(err) + suite.Len(result, 1) + suite.Equal(expected[0], result[0]) + + expected = []*model.Collection{suite.sampleCollections[1]} + expected[0].TenantID = suite.tenantName + expected[0].DatabaseName = newDatabaseName + result, err = suite.coordinator.GetCollections(ctx, types.NilUniqueID(), nil, nil, suite.tenantName, newDatabaseName) + suite.NoError(err) + suite.Len(result, 1) + suite.Equal(expected[0], result[0]) // A new tenant DOES NOT have a default database. 
This does not error, instead 0 // results are returned - result, err = c.GetCollections(ctx, types.NilUniqueID(), nil, nil, "tenant1", common.DefaultDatabase) - assert.NoError(suite.t, err) - assert.Nil(suite.t, result) + result, err = suite.coordinator.GetCollections(ctx, types.NilUniqueID(), nil, nil, newTenantName, suite.databaseName) + suite.NoError(err) + suite.Nil(result) + + // clean up + err = dao.CleanUpTestTenant(suite.db, newTenantName) + suite.NoError(err) + err = dao.CleanUpTestDatabase(suite.db, suite.tenantName, newDatabaseName) + suite.NoError(err) } func (suite *APIsTestSuite) TestCreateGetDeleteTenants() { ctx := context.Background() - assignmentPolicy := NewMockAssignmentPolicy(nil) - c, err := NewCoordinator(ctx, assignmentPolicy, suite.db, nil, nil) - if err != nil { - suite.t.Fatalf("error creating coordinator: %v", err) - } - c.ResetState(ctx) // Create a new tenant - _, err = c.CreateTenant(ctx, &model.CreateTenant{ - Name: "tenant1", + newTenantName := "tenant1" + _, err := suite.coordinator.CreateTenant(ctx, &model.CreateTenant{ + Name: newTenantName, }) - assert.NoError(suite.t, err) + suite.NoError(err) // Create tenant that already exits and expect an error - _, err = c.CreateTenant(ctx, &model.CreateTenant{ - Name: "tenant1", + _, err = suite.coordinator.CreateTenant(ctx, &model.CreateTenant{ + Name: newTenantName, }) - assert.Error(suite.t, err) + suite.Error(err) // Create tenant that already exits and expect an error - _, err = c.CreateTenant(ctx, &model.CreateTenant{ - Name: common.DefaultTenant, + _, err = suite.coordinator.CreateTenant(ctx, &model.CreateTenant{ + Name: suite.tenantName, }) - assert.Error(suite.t, err) + suite.Error(err) // Get the tenant and check that it exists - result, err := c.GetTenant(ctx, &model.GetTenant{Name: "tenant1"}) - assert.NoError(suite.t, err) - assert.Equal(suite.t, "tenant1", result.Name) + result, err := suite.coordinator.GetTenant(ctx, &model.GetTenant{Name: newTenantName}) + suite.NoError(err) + suite.Equal(newTenantName, result.Name) // Get a tenant that does not exist and expect an error - _, err = c.GetTenant(ctx, &model.GetTenant{Name: "tenant2"}) - assert.Error(suite.t, err) + _, err = suite.coordinator.GetTenant(ctx, &model.GetTenant{Name: "tenant2"}) + suite.Error(err) // Create a new database within this tenant - _, err = c.CreateDatabase(ctx, &model.CreateDatabase{ + newDatabaseName := "test_apis_CreateGetDeleteTenants" + _, err = suite.coordinator.CreateDatabase(ctx, &model.CreateDatabase{ ID: types.MustParse("33333333-d7d7-413b-92e1-731098a6e492").String(), - Name: "new_database", - Tenant: "tenant1", + Name: newDatabaseName, + Tenant: newTenantName, }) - assert.NoError(suite.t, err) + suite.NoError(err) // Get the database and check that it exists - databaseResult, err := c.GetDatabase(ctx, &model.GetDatabase{ - Name: "new_database", - Tenant: "tenant1", + databaseResult, err := suite.coordinator.GetDatabase(ctx, &model.GetDatabase{ + Name: newDatabaseName, + Tenant: newTenantName, }) - assert.NoError(suite.t, err) - assert.Equal(suite.t, "new_database", databaseResult.Name) - assert.Equal(suite.t, "tenant1", databaseResult.Tenant) + suite.NoError(err) + suite.Equal(newDatabaseName, databaseResult.Name) + suite.Equal(newTenantName, databaseResult.Tenant) // Get a database that does not exist in a tenant that does exist and expect an error - _, err = c.GetDatabase(ctx, &model.GetDatabase{ + _, err = suite.coordinator.GetDatabase(ctx, &model.GetDatabase{ Name: "new_database1", - Tenant: "tenant1", + 
Tenant: newTenantName, }) - assert.Error(suite.t, err) + suite.Error(err) // Get a database that does not exist in a tenant that does not exist and expect an // error - _, err = c.GetDatabase(ctx, &model.GetDatabase{ + _, err = suite.coordinator.GetDatabase(ctx, &model.GetDatabase{ Name: "new_database1", Tenant: "tenant2", }) - assert.Error(suite.t, err) + suite.Error(err) + + // clean up + err = dao.CleanUpTestTenant(suite.db, newTenantName) + suite.NoError(err) + err = dao.CleanUpTestDatabase(suite.db, suite.tenantName, newDatabaseName) + suite.NoError(err) } -func SampleSegments(t *testing.T, sampleCollections []*model.Collection) []*model.Segment { +func SampleSegments(sampleCollections []*model.Collection) []*model.Segment { metadata1 := model.NewSegmentMetadata[model.SegmentMetadataValueType]() metadata1.Set("test_str", &model.SegmentMetadataValueStringType{Value: "str1"}) metadata1.Set("test_int", &model.SegmentMetadataValueInt64Type{Value: 1}) @@ -713,6 +714,7 @@ func SampleSegments(t *testing.T, sampleCollections []*model.Collection) []*mode Scope: "VECTOR", CollectionID: sampleCollections[0].ID, Metadata: metadata1, + FilePaths: map[string][]string{}, }, { ID: types.MustParse("11111111-d7d7-413b-92e1-731098a6e492"), @@ -721,6 +723,7 @@ func SampleSegments(t *testing.T, sampleCollections []*model.Collection) []*mode Scope: "VECTOR", CollectionID: sampleCollections[1].ID, Metadata: metadata2, + FilePaths: map[string][]string{}, }, { ID: types.MustParse("22222222-d7d7-413b-92e1-731098a6e492"), @@ -729,36 +732,19 @@ func SampleSegments(t *testing.T, sampleCollections []*model.Collection) []*mode Scope: "METADATA", CollectionID: types.NilUniqueID(), Metadata: metadata3, // This segment is not assigned to any collection + FilePaths: map[string][]string{}, }, } return sampleSegments } func (suite *APIsTestSuite) TestCreateGetDeleteSegments() { - sampleCollections := SampleCollections(suite.t, common.DefaultTenant, common.DefaultDatabase) ctx := context.Background() - assignmentPolicy := NewMockAssignmentPolicy(sampleCollections) - c, err := NewCoordinator(ctx, assignmentPolicy, suite.db, nil, nil) - if err != nil { - suite.t.Fatalf("error creating coordinator: %v", err) - } - c.ResetState(ctx) - - for _, collection := range sampleCollections { - c.CreateCollection(ctx, &model.CreateCollection{ - ID: collection.ID, - Name: collection.Name, - Topic: collection.Topic, - Metadata: collection.Metadata, - Dimension: collection.Dimension, - TenantID: collection.TenantID, - DatabaseName: collection.DatabaseName, - }) - } + c := suite.coordinator - sampleSegments := SampleSegments(suite.t, sampleCollections) + sampleSegments := SampleSegments(suite.sampleCollections) for _, segment := range sampleSegments { - c.CreateSegment(ctx, &model.CreateSegment{ + errSegmentCreation := c.CreateSegment(ctx, &model.CreateSegment{ ID: segment.ID, Type: segment.Type, Topic: segment.Topic, @@ -766,17 +752,23 @@ func (suite *APIsTestSuite) TestCreateGetDeleteSegments() { CollectionID: segment.CollectionID, Metadata: segment.Metadata, }) + suite.NoError(errSegmentCreation) } - results, err := c.GetSegments(ctx, types.NilUniqueID(), nil, nil, nil, types.NilUniqueID()) + var results []*model.Segment + for _, segment := range sampleSegments { + result, err := c.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) + suite.NoError(err) + suite.Equal([]*model.Segment{segment}, result) + results = append(results, result...) 
+ } sort.Slice(results, func(i, j int) bool { return results[i].ID.String() < results[j].ID.String() }) - assert.NoError(suite.t, err) - assert.Equal(suite.t, sampleSegments, results) + suite.Equal(sampleSegments, results) // Duplicate create fails - err = c.CreateSegment(ctx, &model.CreateSegment{ + err := c.CreateSegment(ctx, &model.CreateSegment{ ID: sampleSegments[0].ID, Type: sampleSegments[0].Type, Topic: sampleSegments[0].Topic, @@ -784,67 +776,63 @@ func (suite *APIsTestSuite) TestCreateGetDeleteSegments() { CollectionID: sampleSegments[0].CollectionID, Metadata: sampleSegments[0].Metadata, }) - assert.Error(suite.t, err) + suite.Error(err) // Find by id for _, segment := range sampleSegments { result, err := c.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Segment{segment}, result) + suite.NoError(err) + suite.Equal([]*model.Segment{segment}, result) } // Find by type testTypeA := "test_type_a" result, err := c.GetSegments(ctx, types.NilUniqueID(), &testTypeA, nil, nil, types.NilUniqueID()) - assert.NoError(suite.t, err) - assert.Equal(suite.t, sampleSegments[:1], result) + suite.NoError(err) + suite.Equal(sampleSegments[:1], result) testTypeB := "test_type_b" result, err = c.GetSegments(ctx, types.NilUniqueID(), &testTypeB, nil, nil, types.NilUniqueID()) - assert.NoError(suite.t, err) - assert.ElementsMatch(suite.t, result, sampleSegments[1:]) + suite.NoError(err) + suite.ElementsMatch(sampleSegments[1:], result) // Find by collection ID - result, err = c.GetSegments(ctx, types.NilUniqueID(), nil, nil, nil, sampleCollections[0].ID) - assert.NoError(suite.t, err) - assert.Equal(suite.t, sampleSegments[:1], result) + result, err = c.GetSegments(ctx, types.NilUniqueID(), nil, nil, nil, suite.sampleCollections[0].ID) + suite.NoError(err) + suite.Equal(sampleSegments[:1], result) // Find by type and collection ID (positive case) - result, err = c.GetSegments(ctx, types.NilUniqueID(), &testTypeA, nil, nil, sampleCollections[0].ID) - assert.NoError(suite.t, err) - assert.Equal(suite.t, sampleSegments[:1], result) + result, err = c.GetSegments(ctx, types.NilUniqueID(), &testTypeA, nil, nil, suite.sampleCollections[0].ID) + suite.NoError(err) + suite.Equal(sampleSegments[:1], result) // Find by type and collection ID (negative case) - result, err = c.GetSegments(ctx, types.NilUniqueID(), &testTypeB, nil, nil, sampleCollections[0].ID) - assert.NoError(suite.t, err) - assert.Empty(suite.t, result) + result, err = c.GetSegments(ctx, types.NilUniqueID(), &testTypeB, nil, nil, suite.sampleCollections[0].ID) + suite.NoError(err) + suite.Empty(result) // Delete s1 := sampleSegments[0] err = c.DeleteSegment(ctx, s1.ID) - assert.NoError(suite.t, err) + suite.NoError(err) results, err = c.GetSegments(ctx, types.NilUniqueID(), nil, nil, nil, types.NilUniqueID()) - assert.NoError(suite.t, err) - assert.NotContains(suite.t, results, s1) - assert.Len(suite.t, results, len(sampleSegments)-1) - assert.ElementsMatch(suite.t, results, sampleSegments[1:]) + suite.NoError(err) + suite.NotContains(results, s1) + suite.Len(results, len(sampleSegments)-1) + suite.ElementsMatch(results, sampleSegments[1:]) // Duplicate delete throws an exception err = c.DeleteSegment(ctx, s1.ID) - assert.Error(suite.t, err) -} + suite.Error(err) -func (suite *APIsTestSuite) TestUpdateSegment() { - sampleCollections := SampleCollections(suite.t, common.DefaultTenant, common.DefaultDatabase) - ctx := context.Background() - assignmentPolicy := 
NewMockAssignmentPolicy(sampleCollections) - c, err := NewCoordinator(ctx, assignmentPolicy, suite.db, nil, nil) - if err != nil { - suite.t.Fatalf("error creating coordinator: %v", err) + // clean up segments + for _, segment := range sampleSegments { + _ = c.DeleteSegment(ctx, segment.ID) } - c.ResetState(ctx) +} +func (suite *APIsTestSuite) TestUpdateSegment() { testTopic := "test_topic_a" metadata := model.NewSegmentMetadata[model.SegmentMetadataValueType]() @@ -857,25 +845,13 @@ func (suite *APIsTestSuite) TestUpdateSegment() { Type: "test_type_a", Scope: "VECTOR", Topic: &testTopic, - CollectionID: sampleCollections[0].ID, + CollectionID: suite.sampleCollections[0].ID, Metadata: metadata, + FilePaths: map[string][]string{}, } - for _, collection := range sampleCollections { - _, err := c.CreateCollection(ctx, &model.CreateCollection{ - ID: collection.ID, - Name: collection.Name, - Topic: collection.Topic, - Metadata: collection.Metadata, - Dimension: collection.Dimension, - TenantID: collection.TenantID, - DatabaseName: collection.DatabaseName, - }) - - assert.NoError(suite.t, err) - } - - c.CreateSegment(ctx, &model.CreateSegment{ + ctx := context.Background() + errSegmentCreation := suite.coordinator.CreateSegment(ctx, &model.CreateSegment{ ID: segment.ID, Type: segment.Type, Topic: segment.Topic, @@ -883,31 +859,34 @@ func (suite *APIsTestSuite) TestUpdateSegment() { CollectionID: segment.CollectionID, Metadata: segment.Metadata, }) + suite.NoError(errSegmentCreation) // Update topic to new value collectionID := segment.CollectionID.String() newTopic := "new_topic" segment.Topic = &newTopic - c.UpdateSegment(ctx, &model.UpdateSegment{ + _, err := suite.coordinator.UpdateSegment(ctx, &model.UpdateSegment{ Collection: &collectionID, ID: segment.ID, Topic: segment.Topic, }) - result, err := c.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Segment{segment}, result) + suite.NoError(err) + result, err := suite.coordinator.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) + suite.NoError(err) + suite.Equal([]*model.Segment{segment}, result) // Update topic to None segment.Topic = nil - c.UpdateSegment(ctx, &model.UpdateSegment{ + _, err = suite.coordinator.UpdateSegment(ctx, &model.UpdateSegment{ Collection: &collectionID, ID: segment.ID, Topic: segment.Topic, ResetTopic: true, }) - result, err = c.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Segment{segment}, result) + suite.NoError(err) + result, err = suite.coordinator.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) + suite.NoError(err) + suite.Equal([]*model.Segment{segment}, result) // TODO: revisit why we need this // Update collection to new value @@ -934,51 +913,54 @@ func (suite *APIsTestSuite) TestUpdateSegment() { // Add a new metadata key segment.Metadata.Set("test_str2", &model.SegmentMetadataValueStringType{Value: "str2"}) - c.UpdateSegment(ctx, &model.UpdateSegment{ + _, err = suite.coordinator.UpdateSegment(ctx, &model.UpdateSegment{ Collection: &collectionID, ID: segment.ID, Metadata: segment.Metadata}) - result, err = c.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Segment{segment}, result) + suite.NoError(err) + result, err = suite.coordinator.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) + suite.NoError(err) + 
suite.Equal([]*model.Segment{segment}, result) // Update a metadata key segment.Metadata.Set("test_str", &model.SegmentMetadataValueStringType{Value: "str3"}) - c.UpdateSegment(ctx, &model.UpdateSegment{ + _, err = suite.coordinator.UpdateSegment(ctx, &model.UpdateSegment{ Collection: &collectionID, ID: segment.ID, Metadata: segment.Metadata}) - result, err = c.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Segment{segment}, result) + suite.NoError(err) + result, err = suite.coordinator.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) + suite.NoError(err) + suite.Equal([]*model.Segment{segment}, result) // Delete a metadata key segment.Metadata.Remove("test_str") newMetadata := model.NewSegmentMetadata[model.SegmentMetadataValueType]() newMetadata.Set("test_str", nil) - c.UpdateSegment(ctx, &model.UpdateSegment{ + _, err = suite.coordinator.UpdateSegment(ctx, &model.UpdateSegment{ Collection: &collectionID, ID: segment.ID, Metadata: newMetadata}) - result, err = c.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Segment{segment}, result) + suite.NoError(err) + result, err = suite.coordinator.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) + suite.NoError(err) + suite.Equal([]*model.Segment{segment}, result) // Delete all metadata keys segment.Metadata = nil - c.UpdateSegment(ctx, &model.UpdateSegment{ + _, err = suite.coordinator.UpdateSegment(ctx, &model.UpdateSegment{ Collection: &collectionID, ID: segment.ID, Metadata: segment.Metadata, ResetMetadata: true}, ) - result, err = c.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) - assert.NoError(suite.t, err) - assert.Equal(suite.t, []*model.Segment{segment}, result) + suite.NoError(err) + result, err = suite.coordinator.GetSegments(ctx, segment.ID, nil, nil, nil, types.NilUniqueID()) + suite.NoError(err) + suite.Equal([]*model.Segment{segment}, result) } func TestAPIsTestSuite(t *testing.T) { testSuite := new(APIsTestSuite) - testSuite.t = t suite.Run(t, testSuite) } diff --git a/go/pkg/coordinator/coordinator.go b/go/pkg/coordinator/coordinator.go index 110b641be44..d52aeaf8954 100644 --- a/go/pkg/coordinator/coordinator.go +++ b/go/pkg/coordinator/coordinator.go @@ -38,7 +38,6 @@ func NewCoordinator(ctx context.Context, assignmentPolicy CollectionAssignmentPo txnImpl := dbcore.NewTxImpl() metaDomain := dao.NewMetaDomain() s.catalog = coordinator.NewTableCatalogWithNotification(txnImpl, metaDomain, notificationStore) - return s, nil } diff --git a/go/pkg/coordinator/grpc/collection_service.go b/go/pkg/coordinator/grpc/collection_service.go index aa9b6a7151f..a6b9816ec62 100644 --- a/go/pkg/coordinator/grpc/collection_service.go +++ b/go/pkg/coordinator/grpc/collection_service.go @@ -2,7 +2,9 @@ package grpc import ( "context" + "encoding/json" "errors" + "github.com/chroma-core/chroma/go/pkg/grpcutils" "github.com/chroma-core/chroma/go/pkg/common" "github.com/chroma-core/chroma/go/pkg/model" @@ -211,6 +213,53 @@ func (s *Server) UpdateCollection(ctx context.Context, req *coordinatorpb.Update return res, nil } +func (s *Server) FlushCollectionCompaction(ctx context.Context, req *coordinatorpb.FlushCollectionCompactionRequest) (*coordinatorpb.FlushCollectionCompactionResponse, error) { + blob, err := json.Marshal(req) + if err != nil { + return nil, err + } + log.Info("flush collection compaction", zap.String("request", 
string(blob))) + collectionID, err := types.ToUniqueID(&req.CollectionId) + err = grpcutils.BuildErrorForUUID(collectionID, "collection", err) + if err != nil { + return nil, err + } + segmentCompactionInfo := make([]*model.FlushSegmentCompaction, 0, len(req.SegmentCompactionInfo)) + for _, flushSegmentCompaction := range req.SegmentCompactionInfo { + segmentID, err := types.ToUniqueID(&flushSegmentCompaction.SegmentId) + err = grpcutils.BuildErrorForUUID(segmentID, "segment", err) + if err != nil { + return nil, err + } + filePaths := make(map[string][]string) + for key, filePath := range flushSegmentCompaction.FilePaths { + filePaths[key] = filePath.Paths + } + segmentCompactionInfo = append(segmentCompactionInfo, &model.FlushSegmentCompaction{ + ID: segmentID, + FilePaths: filePaths, + }) + } + FlushCollectionCompaction := &model.FlushCollectionCompaction{ + ID: collectionID, + TenantID: req.TenantId, + LogPosition: req.LogPosition, + CurrentCollectionVersion: req.CollectionVersion, + FlushSegmentCompactions: segmentCompactionInfo, + } + flushCollectionInfo, err := s.coordinator.FlushCollectionCompaction(ctx, FlushCollectionCompaction) + if err != nil { + log.Error("error FlushCollectionCompaction", zap.Error(err)) + return nil, grpcutils.BuildInternalGrpcError(err.Error()) + } + res := &coordinatorpb.FlushCollectionCompactionResponse{ + CollectionId: flushCollectionInfo.ID, + CollectionVersion: flushCollectionInfo.CollectionVersion, + LastCompactionTime: flushCollectionInfo.TenantLastCompactionTime, + } + return res, nil +} + func failResponseWithError(err error, code int32) *coordinatorpb.Status { return &coordinatorpb.Status{ Reason: err.Error(), diff --git a/go/pkg/coordinator/grpc/collection_service_test.go b/go/pkg/coordinator/grpc/collection_service_test.go index a300d4c9b3a..9e86c8ff4f1 100644 --- a/go/pkg/coordinator/grpc/collection_service_test.go +++ b/go/pkg/coordinator/grpc/collection_service_test.go @@ -2,15 +2,67 @@ package grpc import ( "context" - "github.com/chroma-core/chroma/go/pkg/grpcutils" - "testing" - "github.com/chroma-core/chroma/go/pkg/common" + "github.com/chroma-core/chroma/go/pkg/grpcutils" + "github.com/chroma-core/chroma/go/pkg/metastore/coordinator" + "github.com/chroma-core/chroma/go/pkg/metastore/db/dao" "github.com/chroma-core/chroma/go/pkg/metastore/db/dbcore" "github.com/chroma-core/chroma/go/pkg/proto/coordinatorpb" + "github.com/pingcap/log" + "github.com/stretchr/testify/suite" + "google.golang.org/genproto/googleapis/rpc/code" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + "gorm.io/gorm" + "k8s.io/apimachinery/pkg/util/rand" "pgregory.net/rapid" + "reflect" + "strconv" + "testing" + "time" ) +type CollectionServiceTestSuite struct { + suite.Suite + catalog *coordinator.Catalog + db *gorm.DB + s *Server + tenantName string + databaseName string + databaseId string +} + +func (suite *CollectionServiceTestSuite) SetupSuite() { + log.Info("setup suite") + suite.db = dbcore.ConfigDatabaseForTesting() + s, err := NewWithGrpcProvider(Config{ + AssignmentPolicy: "simple", + SystemCatalogProvider: "database", + NotificationStoreProvider: "memory", + NotifierProvider: "memory", + Testing: true}, grpcutils.Default, suite.db) + if err != nil { + suite.T().Fatalf("error creating server: %v", err) + } + suite.s = s + txnImpl := dbcore.NewTxImpl() + metaDomain := dao.NewMetaDomain() + suite.catalog = coordinator.NewTableCatalogWithNotification(txnImpl, metaDomain, nil) + suite.tenantName = "tenant_" + suite.T().Name() + 
suite.databaseName = "database_" + suite.T().Name() + DbId, err := dao.CreateTestTenantAndDatabase(suite.db, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.databaseId = DbId +} + +func (suite *CollectionServiceTestSuite) TearDownSuite() { + log.Info("teardown suite") + err := dao.CleanUpTestDatabase(suite.db, suite.tenantName, suite.databaseName) + suite.NoError(err) + err = dao.CleanUpTestTenant(suite.db, suite.tenantName) + suite.NoError(err) +} + // CreateCollection // Collection created successfully are visible to ListCollections // Collection created should have the right metadata, the metadata should be a flat map, with keys as strings and values as strings, ints, or floats @@ -123,3 +175,168 @@ func generateFloat64MetadataValue(t *rapid.T) *coordinatorpb.UpdateMetadataValue func TestCollection(t *testing.T) { // rapid.Check(t, testCollection) } + +func validateDatabase(suite *CollectionServiceTestSuite, collectionId string, collection *coordinatorpb.Collection, filePaths map[string]map[string]*coordinatorpb.FilePaths) { + getCollectionReq := coordinatorpb.GetCollectionsRequest{ + Id: &collectionId, + } + collectionsInDB, err := suite.s.GetCollections(context.Background(), &getCollectionReq) + suite.NoError(err) + suite.Len(collectionsInDB.Collections, 1) + collectionInDB := collectionsInDB.Collections[0] + suite.Equal(collection.Id, collectionInDB.Id) + suite.Equal(collection.LogPosition, collectionInDB.LogPosition) + suite.Equal(collection.Version, collectionInDB.Version) + + getSegmentReq := coordinatorpb.GetSegmentsRequest{ + Collection: &collectionId, + } + segments, err := suite.s.GetSegments(context.Background(), &getSegmentReq) + suite.NoError(err) + for _, segment := range segments.Segments { + suite.True(reflect.DeepEqual(filePaths[segment.Id], segment.FilePaths)) + } +} + +func (suite *CollectionServiceTestSuite) TestServer_FlushCollectionCompaction() { + log.Info("TestServer_FlushCollectionCompaction") + // create test collection + collectionName := "collection_service_test_flush_collection_compaction" + collectionTopic := "collection_service_test_flush_collection_compaction_topic" + collectionID, err := dao.CreateTestCollection(suite.db, collectionName, collectionTopic, 128, suite.databaseId) + suite.NoError(err) + + // flush collection compaction + getSegmentReq := coordinatorpb.GetSegmentsRequest{ + Collection: &collectionID, + } + segments, err := suite.s.GetSegments(context.Background(), &getSegmentReq) + suite.NoError(err) + + flushInfo := make([]*coordinatorpb.FlushSegmentCompactionInfo, 0, len(segments.Segments)) + filePaths := make(map[string]map[string]*coordinatorpb.FilePaths, 0) + testFilePathTypes := []string{"TypeA", "TypeB", "TypeC", "TypeD"} + for _, segment := range segments.Segments { + filePaths[segment.Id] = make(map[string]*coordinatorpb.FilePaths, 0) + for i := 0; i < rand.Intn(len(testFilePathTypes)); i++ { + filePathsThisSeg := make([]string, 0) + for j := 0; j < rand.Intn(5); j++ { + filePathsThisSeg = append(filePathsThisSeg, "test_file_path_"+strconv.Itoa(j+1)) + } + filePathTypeI := rand.Intn(len(testFilePathTypes)) + filePaths[segment.Id][testFilePathTypes[filePathTypeI]] = &coordinatorpb.FilePaths{ + Paths: filePathsThisSeg, + } + } + info := &coordinatorpb.FlushSegmentCompactionInfo{ + SegmentId: segment.Id, + FilePaths: filePaths[segment.Id], + } + flushInfo = append(flushInfo, info) + } + + req := &coordinatorpb.FlushCollectionCompactionRequest{ + TenantId: suite.tenantName, + 
CollectionId: collectionID, + LogPosition: 10, + CollectionVersion: 0, + SegmentCompactionInfo: flushInfo, + } + response, err := suite.s.FlushCollectionCompaction(context.Background(), req) + t1 := time.Now().Unix() + suite.NoError(err) + suite.Equal(collectionID, response.CollectionId) + suite.Equal(int32(1), response.CollectionVersion) + suite.Less(int64(0), response.LastCompactionTime) + suite.LessOrEqual(response.LastCompactionTime, t1) + + // validate database + collection := &coordinatorpb.Collection{ + Id: collectionID, + LogPosition: int64(10), + Version: int32(1), + } + validateDatabase(suite, collectionID, collection, filePaths) + + // flush one segment + filePaths[segments.Segments[0].Id][testFilePathTypes[0]] = &coordinatorpb.FilePaths{ + Paths: []string{"test_file_path_1"}, + } + info := &coordinatorpb.FlushSegmentCompactionInfo{ + SegmentId: segments.Segments[0].Id, + FilePaths: filePaths[segments.Segments[0].Id], + } + req = &coordinatorpb.FlushCollectionCompactionRequest{ + TenantId: suite.tenantName, + CollectionId: collectionID, + LogPosition: 100, + CollectionVersion: 1, + SegmentCompactionInfo: []*coordinatorpb.FlushSegmentCompactionInfo{info}, + } + response, err = suite.s.FlushCollectionCompaction(context.Background(), req) + t2 := time.Now().Unix() + suite.NoError(err) + suite.Equal(collectionID, response.CollectionId) + suite.Equal(int32(2), response.CollectionVersion) + suite.LessOrEqual(t1, response.LastCompactionTime) + suite.LessOrEqual(response.LastCompactionTime, t2) + + // validate database + collection = &coordinatorpb.Collection{ + Id: collectionID, + LogPosition: int64(100), + Version: int32(2), + } + validateDatabase(suite, collectionID, collection, filePaths) + + // test invalid log position + req = &coordinatorpb.FlushCollectionCompactionRequest{ + TenantId: suite.tenantName, + CollectionId: collectionID, + LogPosition: 50, + CollectionVersion: 2, + SegmentCompactionInfo: []*coordinatorpb.FlushSegmentCompactionInfo{info}, + } + response, err = suite.s.FlushCollectionCompaction(context.Background(), req) + suite.Error(err) + suite.Equal(status.Error(codes.Code(code.Code_INTERNAL), common.ErrCollectionLogPositionStale.Error()), err) + // nothing should change in DB + validateDatabase(suite, collectionID, collection, filePaths) + + // test invalid version + req = &coordinatorpb.FlushCollectionCompactionRequest{ + TenantId: suite.tenantName, + CollectionId: collectionID, + LogPosition: 150, + CollectionVersion: 1, + SegmentCompactionInfo: []*coordinatorpb.FlushSegmentCompactionInfo{info}, + } + response, err = suite.s.FlushCollectionCompaction(context.Background(), req) + suite.Error(err) + suite.Equal(status.Error(codes.Code(code.Code_INTERNAL), common.ErrCollectionVersionStale.Error()), err) + // nothing should change in DB + validateDatabase(suite, collectionID, collection, filePaths) + + req = &coordinatorpb.FlushCollectionCompactionRequest{ + TenantId: suite.tenantName, + CollectionId: collectionID, + LogPosition: 150, + CollectionVersion: 5, + SegmentCompactionInfo: []*coordinatorpb.FlushSegmentCompactionInfo{info}, + } + response, err = suite.s.FlushCollectionCompaction(context.Background(), req) + suite.Error(err) + suite.Equal(status.Error(codes.Code(code.Code_INTERNAL), common.ErrCollectionVersionInvalid.Error()), err) + // nothing should change in DB + validateDatabase(suite, collectionID, collection, filePaths) + + // clean up + err = dao.CleanUpTestCollection(suite.db, collectionID) + suite.NoError(err) +} + +func 
TestCollectionServiceTestSuite(t *testing.T) { + testSuite := new(CollectionServiceTestSuite) + suite.Run(t, testSuite) +} diff --git a/go/pkg/coordinator/grpc/proto_model_convert.go b/go/pkg/coordinator/grpc/proto_model_convert.go index 1f396d20880..61359b2fdc0 100644 --- a/go/pkg/coordinator/grpc/proto_model_convert.go +++ b/go/pkg/coordinator/grpc/proto_model_convert.go @@ -38,12 +38,14 @@ func convertCollectionToProto(collection *model.Collection) *coordinatorpb.Colle } collectionpb := &coordinatorpb.Collection{ - Id: collection.ID.String(), - Name: collection.Name, - Topic: collection.Topic, - Dimension: collection.Dimension, - Tenant: collection.TenantID, - Database: collection.DatabaseName, + Id: collection.ID.String(), + Name: collection.Name, + Topic: collection.Topic, + Dimension: collection.Dimension, + Tenant: collection.TenantID, + Database: collection.DatabaseName, + LogPosition: collection.LogPosition, + Version: collection.Version, } if collection.Metadata == nil { return collectionpb @@ -145,6 +147,12 @@ func convertSegmentToProto(segment *model.Segment) *coordinatorpb.Segment { } scope := coordinatorpb.SegmentScope_value[segment.Scope] segmentSceope := coordinatorpb.SegmentScope(scope) + filePaths := make(map[string]*coordinatorpb.FilePaths) + for t, paths := range segment.FilePaths { + filePaths[t] = &coordinatorpb.FilePaths{ + Paths: paths, + } + } segmentpb := &coordinatorpb.Segment{ Id: segment.ID.String(), Type: segment.Type, @@ -152,6 +160,7 @@ func convertSegmentToProto(segment *model.Segment) *coordinatorpb.Segment { Topic: segment.Topic, Collection: nil, Metadata: nil, + FilePaths: filePaths, } collectionID := segment.CollectionID diff --git a/go/pkg/coordinator/grpc/proto_model_convert_test.go b/go/pkg/coordinator/grpc/proto_model_convert_test.go index e875233aa72..6033fff5a37 100644 --- a/go/pkg/coordinator/grpc/proto_model_convert_test.go +++ b/go/pkg/coordinator/grpc/proto_model_convert_test.go @@ -184,11 +184,12 @@ func TestConvertSegmentToProto(t *testing.T) { // Test case 2: segment is not nil testTopic := "test_topic" segment := &model.Segment{ - ID: types.NewUniqueID(), - Type: "test_type", - Scope: "METADATA", - Topic: &testTopic, - Metadata: nil, + ID: types.NewUniqueID(), + Type: "test_type", + Scope: "METADATA", + Topic: &testTopic, + Metadata: nil, + FilePaths: map[string][]string{}, } segmentpb = convertSegmentToProto(segment) assert.NotNil(t, segmentpb) diff --git a/go/pkg/coordinator/grpc/tenant_database_service.go b/go/pkg/coordinator/grpc/tenant_database_service.go index 56c5f224218..7ae4445fb08 100644 --- a/go/pkg/coordinator/grpc/tenant_database_service.go +++ b/go/pkg/coordinator/grpc/tenant_database_service.go @@ -98,7 +98,7 @@ func (s *Server) SetLastCompactionTimeForTenant(ctx context.Context, req *coordi err := s.coordinator.SetTenantLastCompactionTime(ctx, req.TenantLastCompactionTime.TenantId, req.TenantLastCompactionTime.LastCompactionTime) if err != nil { log.Error("error SetTenantLastCompactionTime", zap.Any("request", req.TenantLastCompactionTime), zap.Error(err)) - return nil, grpcutils.BuildInternalGrpcError("error SetTenantLastCompactionTime") + return nil, grpcutils.BuildInternalGrpcError(err.Error()) } return &emptypb.Empty{}, nil } @@ -109,7 +109,7 @@ func (s *Server) GetLastCompactionTimeForTenant(ctx context.Context, req *coordi tenants, err := s.coordinator.GetTenantsLastCompactionTime(ctx, tenantIDs) if err != nil { log.Error("error GetLastCompactionTimeForTenant", zap.Any("tenantIDs", tenantIDs), zap.Error(err)) - return 
nil, grpcutils.BuildInternalGrpcError("error GetTenantsLastCompactionTime") + return nil, grpcutils.BuildInternalGrpcError(err.Error()) } for _, tenant := range tenants { res.TenantLastCompactionTime = append(res.TenantLastCompactionTime, &coordinatorpb.TenantLastCompactionTime{ diff --git a/go/pkg/coordinator/grpc/tenant_database_service_test.go b/go/pkg/coordinator/grpc/tenant_database_service_test.go index 153d721cc27..4f37b060734 100644 --- a/go/pkg/coordinator/grpc/tenant_database_service_test.go +++ b/go/pkg/coordinator/grpc/tenant_database_service_test.go @@ -2,13 +2,13 @@ package grpc import ( "context" + "github.com/chroma-core/chroma/go/pkg/common" "github.com/chroma-core/chroma/go/pkg/grpcutils" "github.com/chroma-core/chroma/go/pkg/metastore/coordinator" "github.com/chroma-core/chroma/go/pkg/metastore/db/dao" "github.com/chroma-core/chroma/go/pkg/metastore/db/dbcore" "github.com/chroma-core/chroma/go/pkg/model" "github.com/chroma-core/chroma/go/pkg/proto/coordinatorpb" - "github.com/chroma-core/chroma/go/pkg/types" "github.com/pingcap/log" "github.com/stretchr/testify/suite" "google.golang.org/genproto/googleapis/rpc/code" @@ -21,11 +21,9 @@ import ( type TenantDatabaseServiceTestSuite struct { suite.Suite - catalog *coordinator.Catalog - db *gorm.DB - s *Server - t *testing.T - collectionId types.UniqueID + catalog *coordinator.Catalog + db *gorm.DB + s *Server } func (suite *TenantDatabaseServiceTestSuite) SetupSuite() { @@ -33,12 +31,12 @@ func (suite *TenantDatabaseServiceTestSuite) SetupSuite() { suite.db = dbcore.ConfigDatabaseForTesting() s, err := NewWithGrpcProvider(Config{ AssignmentPolicy: "simple", - SystemCatalogProvider: "memory", + SystemCatalogProvider: "database", NotificationStoreProvider: "memory", NotifierProvider: "memory", Testing: true}, grpcutils.Default, suite.db) if err != nil { - suite.t.Fatalf("error creating server: %v", err) + suite.T().Fatalf("error creating server: %v", err) } suite.s = s txnImpl := dbcore.NewTxImpl() @@ -52,8 +50,6 @@ func (suite *TenantDatabaseServiceTestSuite) SetupTest() { func (suite *TenantDatabaseServiceTestSuite) TearDownTest() { log.Info("teardown test") - // TODO: clean up per test when delete is implemented for tenant - dbcore.ResetTestTables(suite.db) } func (suite *TenantDatabaseServiceTestSuite) TestServer_TenantLastCompactionTime() { @@ -66,7 +62,7 @@ func (suite *TenantDatabaseServiceTestSuite) TestServer_TenantLastCompactionTime }, } _, err := suite.s.SetLastCompactionTimeForTenant(context.Background(), request) - suite.Equal(status.Error(codes.Code(code.Code_INTERNAL), "error SetTenantLastCompactionTime"), err) + suite.Equal(status.Error(codes.Code(code.Code_INTERNAL), common.ErrTenantNotFound.Error()), err) // create tenant _, err = suite.catalog.CreateTenant(context.Background(), &model.CreateTenant{ @@ -99,10 +95,13 @@ func (suite *TenantDatabaseServiceTestSuite) TestServer_TenantLastCompactionTime suite.Equal(1, len(tenants.TenantLastCompactionTime)) suite.Equal(tenantId, tenants.TenantLastCompactionTime[0].TenantId) suite.Equal(int64(1), tenants.TenantLastCompactionTime[0].LastCompactionTime) + + // clean up + err = dao.CleanUpTestTenant(suite.db, tenantId) + suite.NoError(err) } func TestTenantDatabaseServiceTestSuite(t *testing.T) { testSuite := new(TenantDatabaseServiceTestSuite) - testSuite.t = t suite.Run(t, testSuite) } diff --git a/go/pkg/grpcutils/response.go b/go/pkg/grpcutils/response.go index 981bdba1011..5a89344eb30 100644 --- a/go/pkg/grpcutils/response.go +++ b/go/pkg/grpcutils/response.go @@ 
-31,10 +31,10 @@ func BuildInternalGrpcError(msg string) error { return status.Error(codes.Internal, msg) } -func BuildErrorForCollectionId(collectionID types.UniqueID, err error) error { - if err != nil || collectionID == types.NilUniqueID() { - log.Error("collection id format error", zap.String("collection.id", collectionID.String())) - grpcError, err := BuildInvalidArgumentGrpcError("collection_id", "wrong collection_id format") +func BuildErrorForUUID(ID types.UniqueID, name string, err error) error { + if err != nil || ID == types.NilUniqueID() { + log.Error(name+"id format error", zap.String(name+".id", ID.String())) + grpcError, err := BuildInvalidArgumentGrpcError(name+"_id", "wrong "+name+"_id format") if err != nil { log.Error("error building grpc error", zap.Error(err)) return err diff --git a/go/pkg/logservice/grpc/record_log_service.go b/go/pkg/logservice/grpc/record_log_service.go index 1aa88eb956c..f68e141c0c6 100644 --- a/go/pkg/logservice/grpc/record_log_service.go +++ b/go/pkg/logservice/grpc/record_log_service.go @@ -21,7 +21,7 @@ type CollectionInfo struct { func (s *Server) PushLogs(ctx context.Context, req *logservicepb.PushLogsRequest) (*logservicepb.PushLogsResponse, error) { res := &logservicepb.PushLogsResponse{} collectionID, err := types.ToUniqueID(&req.CollectionId) - err = grpcutils.BuildErrorForCollectionId(collectionID, err) + err = grpcutils.BuildErrorForUUID(collectionID, "collection", err) if err != nil { return nil, err } @@ -42,7 +42,7 @@ func (s *Server) PushLogs(ctx context.Context, req *logservicepb.PushLogsRequest recordCount, err := s.logService.PushLogs(ctx, collectionID, recordsContent) if err != nil { log.Error("error pushing logs", zap.Error(err)) - return nil, grpcutils.BuildInternalGrpcError("error pushing logs") + return nil, grpcutils.BuildInternalGrpcError(err.Error()) } res.RecordCount = int32(recordCount) log.Info("PushLogs success", zap.String("collectionID", req.CollectionId), zap.Int("recordCount", recordCount)) @@ -52,7 +52,7 @@ func (s *Server) PushLogs(ctx context.Context, req *logservicepb.PushLogsRequest func (s *Server) PullLogs(ctx context.Context, req *logservicepb.PullLogsRequest) (*logservicepb.PullLogsResponse, error) { res := &logservicepb.PullLogsResponse{} collectionID, err := types.ToUniqueID(&req.CollectionId) - err = grpcutils.BuildErrorForCollectionId(collectionID, err) + err = grpcutils.BuildErrorForUUID(collectionID, "collection", err) if err != nil { return nil, err } @@ -60,7 +60,7 @@ func (s *Server) PullLogs(ctx context.Context, req *logservicepb.PullLogsRequest recordLogs, err := s.logService.PullLogs(ctx, collectionID, req.GetStartFromId(), int(req.BatchSize)) if err != nil { log.Error("error pulling logs", zap.Error(err)) - return nil, grpcutils.BuildInternalGrpcError("error pulling logs") + return nil, grpcutils.BuildInternalGrpcError(err.Error()) } for index := range recordLogs { record := &coordinatorpb.SubmitEmbeddingRecord{} @@ -90,7 +90,7 @@ func (s *Server) GetAllCollectionInfoToCompact(ctx context.Context, req *logserv recordLogs, err := s.logService.GetAllCollectionIDsToCompact() if err != nil { log.Error("error getting collection info", zap.Error(err)) - return nil, grpcutils.BuildInternalGrpcError("error getting collection info") + return nil, grpcutils.BuildInternalGrpcError(err.Error()) } for _, recordLog := range recordLogs { collectionInfo := &logservicepb.CollectionInfo{ diff --git a/go/pkg/logservice/grpc/record_log_service_test.go b/go/pkg/logservice/grpc/record_log_service_test.go index 
52ac27c0176..ed18e1f23a7 100644 --- a/go/pkg/logservice/grpc/record_log_service_test.go +++ b/go/pkg/logservice/grpc/record_log_service_test.go @@ -11,7 +11,6 @@ import ( "github.com/chroma-core/chroma/go/pkg/proto/logservicepb" "github.com/chroma-core/chroma/go/pkg/types" "github.com/pingcap/log" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" @@ -25,7 +24,6 @@ type RecordLogServiceTestSuite struct { suite.Suite db *gorm.DB s *Server - t *testing.T collectionId types.UniqueID } @@ -38,18 +36,25 @@ func (suite *RecordLogServiceTestSuite) SetupSuite() { StartGrpc: false, }) suite.s = s - suite.db = dbcore.GetDB(context.Background()) - suite.collectionId = types.NewUniqueID() + suite.db = dbcore.ConfigDatabaseForTesting() + recordLogTableExist := suite.db.Migrator().HasTable(&dbmodel.RecordLog{}) + if !recordLogTableExist { + err := suite.db.Migrator().CreateTable(&dbmodel.RecordLog{}) + suite.NoError(err) + } } func (suite *RecordLogServiceTestSuite) SetupTest() { log.Info("setup test") - testutils.SetupTest(suite.db, suite.collectionId) + suite.collectionId = types.NewUniqueID() + err := testutils.CreateCollections(suite.db, suite.collectionId) + suite.NoError(err) } func (suite *RecordLogServiceTestSuite) TearDownTest() { log.Info("teardown test") - testutils.TearDownTest(suite.db) + err := testutils.CleanupCollections(suite.db, suite.collectionId) + suite.NoError(err) } func encodeVector(dimension int32, vector []float32, encoding coordinatorpb.ScalarEncoding) *coordinatorpb.Vector { @@ -101,26 +106,26 @@ func (suite *RecordLogServiceTestSuite) TestServer_PushLogs() { Records: recordsToSubmit, } response, err := suite.s.PushLogs(context.Background(), &pushRequest) - assert.Nil(suite.t, err) - assert.Equal(suite.t, int32(3), response.RecordCount) + suite.NoError(err) + suite.Equal(int32(3), response.RecordCount) var recordLogs []*dbmodel.RecordLog suite.db.Where("collection_id = ?", types.FromUniqueID(suite.collectionId)).Find(&recordLogs) - assert.Len(suite.t, recordLogs, 3) + suite.Len(recordLogs, 3) for index := range recordLogs { - assert.Equal(suite.t, int64(index+1), recordLogs[index].ID) - assert.Equal(suite.t, suite.collectionId.String(), *recordLogs[index].CollectionID) + suite.Equal(int64(index+1), recordLogs[index].ID) + suite.Equal(suite.collectionId.String(), *recordLogs[index].CollectionID) record := &coordinatorpb.SubmitEmbeddingRecord{} - if err := proto.Unmarshal(*recordLogs[index].Record, record); err != nil { - panic(err) + if unmarshalErr := proto.Unmarshal(*recordLogs[index].Record, record); unmarshalErr != nil { + suite.NoError(unmarshalErr) } - assert.Equal(suite.t, record.Id, recordsToSubmit[index].Id) - assert.Equal(suite.t, record.Operation, recordsToSubmit[index].Operation) - assert.Equal(suite.t, record.CollectionId, "") - assert.Equal(suite.t, record.Metadata, recordsToSubmit[index].Metadata) - assert.Equal(suite.t, record.Vector.Dimension, recordsToSubmit[index].Vector.Dimension) - assert.Equal(suite.t, record.Vector.Encoding, recordsToSubmit[index].Vector.Encoding) - assert.Equal(suite.t, record.Vector.Vector, recordsToSubmit[index].Vector.Vector) + suite.Equal(recordsToSubmit[index].Id, record.Id) + suite.Equal(recordsToSubmit[index].Operation, record.Operation) + suite.Equal("", record.CollectionId) + suite.Equal(recordsToSubmit[index].Metadata, record.Metadata) + suite.Equal(recordsToSubmit[index].Vector.Dimension, record.Vector.Dimension) + 
suite.Equal(recordsToSubmit[index].Vector.Encoding, record.Vector.Encoding) + suite.Equal(recordsToSubmit[index].Vector.Vector, record.Vector.Vector) } } @@ -131,7 +136,8 @@ func (suite *RecordLogServiceTestSuite) TestServer_PullLogs() { CollectionId: suite.collectionId.String(), Records: recordsToSubmit, } - suite.s.PushLogs(context.Background(), &pushRequest) + _, err := suite.s.PushLogs(context.Background(), &pushRequest) + suite.NoError(err) // pull the records pullRequest := logservicepb.PullLogsRequest{ @@ -140,17 +146,17 @@ func (suite *RecordLogServiceTestSuite) TestServer_PullLogs() { BatchSize: 10, } pullResponse, err := suite.s.PullLogs(context.Background(), &pullRequest) - assert.Nil(suite.t, err) - assert.Len(suite.t, pullResponse.Records, 3) + suite.NoError(err) + suite.Len(pullResponse.Records, 3) for index := range pullResponse.Records { - assert.Equal(suite.t, int64(index+1), pullResponse.Records[index].LogId) - assert.Equal(suite.t, recordsToSubmit[index].Id, pullResponse.Records[index].Record.Id) - assert.Equal(suite.t, recordsToSubmit[index].Operation, pullResponse.Records[index].Record.Operation) - assert.Equal(suite.t, recordsToSubmit[index].CollectionId, pullResponse.Records[index].Record.CollectionId) - assert.Equal(suite.t, recordsToSubmit[index].Metadata, pullResponse.Records[index].Record.Metadata) - assert.Equal(suite.t, recordsToSubmit[index].Vector.Dimension, pullResponse.Records[index].Record.Vector.Dimension) - assert.Equal(suite.t, recordsToSubmit[index].Vector.Encoding, pullResponse.Records[index].Record.Vector.Encoding) - assert.Equal(suite.t, recordsToSubmit[index].Vector.Vector, pullResponse.Records[index].Record.Vector.Vector) + suite.Equal(int64(index+1), pullResponse.Records[index].LogId) + suite.Equal(pullResponse.Records[index].Record.Id, recordsToSubmit[index].Id) + suite.Equal(pullResponse.Records[index].Record.Operation, recordsToSubmit[index].Operation) + suite.Equal(pullResponse.Records[index].Record.CollectionId, recordsToSubmit[index].CollectionId) + suite.Equal(pullResponse.Records[index].Record.Metadata, recordsToSubmit[index].Metadata) + suite.Equal(pullResponse.Records[index].Record.Vector.Dimension, recordsToSubmit[index].Vector.Dimension) + suite.Equal(pullResponse.Records[index].Record.Vector.Encoding, recordsToSubmit[index].Vector.Encoding) + suite.Equal(pullResponse.Records[index].Record.Vector.Vector, recordsToSubmit[index].Vector.Vector) } } @@ -161,13 +167,12 @@ func (suite *RecordLogServiceTestSuite) TestServer_Bad_CollectionId() { CollectionId: "badId", Records: []*coordinatorpb.SubmitEmbeddingRecord{}, } - pushResponse, err := suite.s.PushLogs(context.Background(), &pushRequest) - assert.Nil(suite.t, pushResponse) - assert.NotNil(suite.t, err) + _, err := suite.s.PushLogs(context.Background(), &pushRequest) + suite.Error(err) st, ok := status.FromError(err) - assert.True(suite.t, ok) - assert.Equal(suite.T(), codes.InvalidArgument, st.Code()) - assert.Equal(suite.T(), "invalid collection_id", st.Message()) + suite.True(ok) + suite.Equal(codes.InvalidArgument, st.Code()) + suite.Equal("invalid collection_id", st.Message()) // pull the records // pull the records @@ -176,13 +181,12 @@ func (suite *RecordLogServiceTestSuite) TestServer_Bad_CollectionId() { StartFromId: 0, BatchSize: 10, } - pullResponse, err := suite.s.PullLogs(context.Background(), &pullRequest) - assert.Nil(suite.t, pullResponse) - assert.NotNil(suite.t, err) + _, err = suite.s.PullLogs(context.Background(), &pullRequest) + suite.Error(err) st, ok = 
status.FromError(err) - assert.True(suite.t, ok) - assert.Equal(suite.T(), codes.InvalidArgument, st.Code()) - assert.Equal(suite.T(), "invalid collection_id", st.Message()) + suite.True(ok) + suite.Equal(codes.InvalidArgument, st.Code()) + suite.Equal("invalid collection_id", st.Message()) } func (suite *RecordLogServiceTestSuite) TestServer_GetAllCollectionInfoToCompact() { @@ -193,17 +197,18 @@ func (suite *RecordLogServiceTestSuite) TestServer_GetAllCollectionInfoToCompact CollectionId: suite.collectionId.String(), Records: recordsToSubmit, } - suite.s.PushLogs(context.Background(), &pushRequest) + _, err := suite.s.PushLogs(context.Background(), &pushRequest) + suite.NoError(err) // get collection info for compactor request := logservicepb.GetAllCollectionInfoToCompactRequest{} response, err := suite.s.GetAllCollectionInfoToCompact(context.Background(), &request) - assert.Nil(suite.t, err) - assert.Len(suite.t, response.AllCollectionInfo, 1) - assert.Equal(suite.T(), suite.collectionId.String(), response.AllCollectionInfo[0].CollectionId) - assert.Equal(suite.T(), int64(1), response.AllCollectionInfo[0].FirstLogId) - assert.True(suite.T(), response.AllCollectionInfo[0].FirstLogIdTs > startTime) - assert.True(suite.T(), response.AllCollectionInfo[0].FirstLogIdTs < time.Now().UnixNano()) + suite.NoError(err) + suite.Len(response.AllCollectionInfo, 1) + suite.Equal(suite.collectionId.String(), response.AllCollectionInfo[0].CollectionId) + suite.Equal(int64(1), response.AllCollectionInfo[0].FirstLogId) + suite.True(response.AllCollectionInfo[0].FirstLogIdTs > startTime) + suite.True(response.AllCollectionInfo[0].FirstLogIdTs < time.Now().UnixNano()) // move log position testutils.MoveLogPosition(suite.db, suite.collectionId, 2) @@ -211,17 +216,15 @@ func (suite *RecordLogServiceTestSuite) TestServer_GetAllCollectionInfoToCompact // get collection info for compactor request = logservicepb.GetAllCollectionInfoToCompactRequest{} response, err = suite.s.GetAllCollectionInfoToCompact(context.Background(), &request) - assert.Nil(suite.t, err) - assert.Len(suite.t, response.AllCollectionInfo, 1) - assert.Equal(suite.T(), suite.collectionId.String(), response.AllCollectionInfo[0].CollectionId) - assert.Equal(suite.T(), int64(3), response.AllCollectionInfo[0].FirstLogId) - assert.True(suite.T(), response.AllCollectionInfo[0].FirstLogIdTs > startTime) - assert.True(suite.T(), response.AllCollectionInfo[0].FirstLogIdTs < time.Now().UnixNano()) - + suite.NoError(err) + suite.Len(response.AllCollectionInfo, 1) + suite.Equal(suite.collectionId.String(), response.AllCollectionInfo[0].CollectionId) + suite.Equal(int64(3), response.AllCollectionInfo[0].FirstLogId) + suite.True(response.AllCollectionInfo[0].FirstLogIdTs > startTime) + suite.True(response.AllCollectionInfo[0].FirstLogIdTs < time.Now().UnixNano()) } func TestRecordLogServiceTestSuite(t *testing.T) { testSuite := new(RecordLogServiceTestSuite) - testSuite.t = t suite.Run(t, testSuite) } diff --git a/go/pkg/logservice/testutils/record_log_test_util.go b/go/pkg/logservice/testutils/record_log_test_util.go index e70f55747fc..a6f7c3d9aa0 100644 --- a/go/pkg/logservice/testutils/record_log_test_util.go +++ b/go/pkg/logservice/testutils/record_log_test_util.go @@ -1,18 +1,13 @@ package testutils import ( - "github.com/chroma-core/chroma/go/pkg/metastore/db/dbcore" "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" "github.com/chroma-core/chroma/go/pkg/types" - "github.com/pingcap/log" - "go.uber.org/zap" "gorm.io/gorm" "strconv" ) -func 
SetupTest(db *gorm.DB, collectionIds ...types.UniqueID) { - dbcore.ResetTestTables(db) - +func CreateCollections(db *gorm.DB, collectionIds ...types.UniqueID) error { // create test collections for index, collectionId := range collectionIds { collectionName := "collection" + strconv.Itoa(index+1) @@ -27,18 +22,27 @@ func SetupTest(db *gorm.DB, collectionIds ...types.UniqueID) { } err := db.Create(collection).Error if err != nil { - log.Error("create collection error", zap.Error(err)) + return err } } + return nil } -func TearDownTest(db *gorm.DB) { - db.Migrator().DropTable(&dbmodel.Segment{}) - db.Migrator().CreateTable(&dbmodel.Segment{}) - db.Migrator().DropTable(&dbmodel.Collection{}) - db.Migrator().CreateTable(&dbmodel.Collection{}) - db.Migrator().DropTable(&dbmodel.RecordLog{}) - db.Migrator().CreateTable(&dbmodel.RecordLog{}) +func CleanupCollections(db *gorm.DB, collectionIds ...types.UniqueID) error { + // delete test collections + for _, collectionId := range collectionIds { + err := db.Where("id = ?", collectionId.String()).Delete(&dbmodel.Collection{}).Error + if err != nil { + return err + } + } + + // cleanup logs + err := db.Where("collection_id in ?", collectionIds).Delete(&dbmodel.RecordLog{}).Error + if err != nil { + return err + } + return nil } func MoveLogPosition(db *gorm.DB, collectionId types.UniqueID, position int64) { diff --git a/go/pkg/metastore/catalog.go b/go/pkg/metastore/catalog.go index 52d6ac2ca35..15f73bc0d1f 100644 --- a/go/pkg/metastore/catalog.go +++ b/go/pkg/metastore/catalog.go @@ -29,4 +29,5 @@ type Catalog interface { GetAllTenants(ctx context.Context, ts types.Timestamp) ([]*model.Tenant, error) SetTenantLastCompactionTime(ctx context.Context, tenantID string, lastCompactionTime int64) error GetTenantsLastCompactionTime(ctx context.Context, tenantIDs []string) ([]*dbmodel.Tenant, error) + FlushCollectionCompaction(ctx context.Context, flushCollectionCompaction *model.FlushCollectionCompaction) (*model.FlushCollectionInfo, error) } diff --git a/go/pkg/metastore/coordinator/model_db_convert.go b/go/pkg/metastore/coordinator/model_db_convert.go index 2f164be253d..717b713cf19 100644 --- a/go/pkg/metastore/coordinator/model_db_convert.go +++ b/go/pkg/metastore/coordinator/model_db_convert.go @@ -22,6 +22,8 @@ func convertCollectionToModel(collectionAndMetadataList []*dbmodel.CollectionAnd TenantID: collectionAndMetadata.TenantID, DatabaseName: collectionAndMetadata.DatabaseName, Ts: collectionAndMetadata.Collection.Ts, + LogPosition: collectionAndMetadata.Collection.LogPosition, + Version: collectionAndMetadata.Collection.Version, } collection.Metadata = convertCollectionMetadataToModel(collectionAndMetadata.CollectionMetadata) collections = append(collections, collection) diff --git a/go/pkg/metastore/coordinator/table_catalog.go b/go/pkg/metastore/coordinator/table_catalog.go index bed31c51532..e1ae1e53d5c 100644 --- a/go/pkg/metastore/coordinator/table_catalog.go +++ b/go/pkg/metastore/coordinator/table_catalog.go @@ -316,6 +316,7 @@ func (tc *Catalog) GetCollections(ctx context.Context, collectionID types.Unique } func (tc *Catalog) DeleteCollection(ctx context.Context, deleteCollection *model.DeleteCollection) error { + log.Info("deleting collection", zap.Any("deleteCollection", deleteCollection)) return tc.txImpl.Transaction(ctx, func(txCtx context.Context) error { collectionID := deleteCollection.ID collectionAndMetadata, err := tc.metaDomain.CollectionDb(txCtx).GetCollections(types.FromUniqueID(collectionID), nil, nil, 
deleteCollection.TenantID, deleteCollection.DatabaseName) @@ -351,6 +352,7 @@ func (tc *Catalog) DeleteCollection(ctx context.Context, deleteCollection *model } func (tc *Catalog) UpdateCollection(ctx context.Context, updateCollection *model.UpdateCollection, ts types.Timestamp) (*model.Collection, error) { + log.Info("updating collection", zap.String("collectionId", updateCollection.ID.String())) var result *model.Collection err := tc.txImpl.Transaction(ctx, func(txCtx context.Context) error { @@ -411,7 +413,7 @@ func (tc *Catalog) UpdateCollection(ctx context.Context, updateCollection *model if err != nil { return nil, err } - log.Info("collection updated", zap.Any("collection", result)) + log.Info("collection updated", zap.String("collectionID", result.ID.String())) return result, nil } @@ -473,11 +475,12 @@ func (tc *Catalog) GetSegments(ctx context.Context, segmentID types.UniqueID, se segments := make([]*model.Segment, 0, len(segmentAndMetadataList)) for _, segmentAndMetadata := range segmentAndMetadataList { segment := &model.Segment{ - ID: types.MustParse(segmentAndMetadata.Segment.ID), - Type: segmentAndMetadata.Segment.Type, - Scope: segmentAndMetadata.Segment.Scope, - Topic: segmentAndMetadata.Segment.Topic, - Ts: segmentAndMetadata.Segment.Ts, + ID: types.MustParse(segmentAndMetadata.Segment.ID), + Type: segmentAndMetadata.Segment.Type, + Scope: segmentAndMetadata.Segment.Scope, + Topic: segmentAndMetadata.Segment.Topic, + Ts: segmentAndMetadata.Segment.Ts, + FilePaths: segmentAndMetadata.Segment.FilePaths, } if segmentAndMetadata.Segment.CollectionID != nil { @@ -614,3 +617,41 @@ func (tc *Catalog) GetTenantsLastCompactionTime(ctx context.Context, tenantIDs [ tenants, err := tc.metaDomain.TenantDb(ctx).GetTenantsLastCompactionTime(tenantIDs) return tenants, err } + +func (tc *Catalog) FlushCollectionCompaction(ctx context.Context, flushCollectionCompaction *model.FlushCollectionCompaction) (*model.FlushCollectionInfo, error) { + flushCollectionInfo := &model.FlushCollectionInfo{ + ID: flushCollectionCompaction.ID.String(), + } + + err := tc.txImpl.Transaction(ctx, func(txCtx context.Context) error { + // register files to Segment metadata + err := tc.metaDomain.SegmentDb(txCtx).RegisterFilePaths(flushCollectionCompaction.FlushSegmentCompactions) + if err != nil { + return err + } + + // update collection log position and version + collectionVersion, err := tc.metaDomain.CollectionDb(txCtx).UpdateLogPositionAndVersion(flushCollectionCompaction.ID.String(), flushCollectionCompaction.LogPosition, flushCollectionCompaction.CurrentCollectionVersion) + if err != nil { + return err + } + flushCollectionInfo.CollectionVersion = collectionVersion + + // update tenant last compaction time + // TODO: add a system configuration to disable + // since this might cause resource contention if one tenant has a lot of collection compactions at the same time + lastCompactionTime := time.Now().Unix() + err = tc.metaDomain.TenantDb(txCtx).UpdateTenantLastCompactionTime(flushCollectionCompaction.TenantID, lastCompactionTime) + if err != nil { + return err + } + flushCollectionInfo.TenantLastCompactionTime = lastCompactionTime + + // return nil will commit the transaction + return nil + }) + if err != nil { + return nil, err + } + return flushCollectionInfo, nil +} diff --git a/go/pkg/metastore/db/dao/collection.go b/go/pkg/metastore/db/dao/collection.go index 295046f42f0..f2f381b6b0d 100644 --- a/go/pkg/metastore/db/dao/collection.go +++ b/go/pkg/metastore/db/dao/collection.go @@ -6,6 +6,7 @@ 
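
Aside on the new Catalog.FlushCollectionCompaction method shown above: a minimal caller-side sketch, not part of this change. The package name and wiring are assumed; only the Catalog method signature and the model field names come from the diff, and the metastore import path is inferred from the file layout.

package compactionsketch // hypothetical placement, for illustration only

import (
	"context"

	"github.com/chroma-core/chroma/go/pkg/metastore"
	"github.com/chroma-core/chroma/go/pkg/model"
	"github.com/chroma-core/chroma/go/pkg/types"
)

// flushAfterCompaction shows the shape of a flush: the segment file paths, the
// new log position, and the collection version read before compacting travel in
// one request, and the catalog applies them in a single transaction, returning
// the bumped collection version and the tenant's new last compaction time.
func flushAfterCompaction(ctx context.Context, catalog metastore.Catalog,
	collectionID, segmentID types.UniqueID, tenantID string,
	logPosition int64, readVersion int32, paths map[string][]string) (*model.FlushCollectionInfo, error) {
	return catalog.FlushCollectionCompaction(ctx, &model.FlushCollectionCompaction{
		ID:                       collectionID,
		TenantID:                 tenantID,
		LogPosition:              logPosition,
		CurrentCollectionVersion: readVersion,
		FlushSegmentCompactions: []*model.FlushSegmentCompaction{
			{ID: segmentID, FilePaths: paths},
		},
	})
}
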
import ( "github.com/chroma-core/chroma/go/pkg/common" "github.com/jackc/pgx/v5/pgconn" "gorm.io/gorm/clause" + "strings" "go.uber.org/zap" "gorm.io/gorm" @@ -25,31 +26,40 @@ func (s *collectionDb) DeleteAll() error { } func (s *collectionDb) GetCollections(id *string, name *string, topic *string, tenantID string, databaseName string) ([]*dbmodel.CollectionAndMetadata, error) { + var getCollectionInput strings.Builder + getCollectionInput.WriteString("GetCollections input: ") + var collections []*dbmodel.CollectionAndMetadata query := s.db.Table("collections"). - Select("collections.id, collections.name, collections.topic, collections.dimension, collections.database_id, databases.name, databases.tenant_id, collection_metadata.key, collection_metadata.str_value, collection_metadata.int_value, collection_metadata.float_value"). + Select("collections.id, collections.log_position, collections.version, collections.name, collections.topic, collections.dimension, collections.database_id, databases.name, databases.tenant_id, collection_metadata.key, collection_metadata.str_value, collection_metadata.int_value, collection_metadata.float_value"). Joins("LEFT JOIN collection_metadata ON collections.id = collection_metadata.collection_id"). Joins("INNER JOIN databases ON collections.database_id = databases.id"). Order("collections.id") if databaseName != "" { query = query.Where("databases.name = ?", databaseName) + getCollectionInput.WriteString("databases.name: " + databaseName + ", ") } if tenantID != "" { query = query.Where("databases.tenant_id = ?", tenantID) + getCollectionInput.WriteString("databases.tenant_id: " + tenantID + ", ") } if id != nil { query = query.Where("collections.id = ?", *id) + getCollectionInput.WriteString("collections.id: " + *id + ", ") } if topic != nil { query = query.Where("collections.topic = ?", *topic) + getCollectionInput.WriteString("collections.topic: " + *topic + ", ") } if name != nil { query = query.Where("collections.name = ?", *name) + getCollectionInput.WriteString("collections.name: " + *name + ", ") } + log.Info(getCollectionInput.String()) rows, err := query.Rows() if err != nil { @@ -64,6 +74,8 @@ func (s *collectionDb) GetCollections(id *string, name *string, topic *string, t for rows.Next() { var ( collectionID string + logPosition int64 + version int32 collectionName string collectionTopic string collectionDimension sql.NullInt32 @@ -76,7 +88,7 @@ func (s *collectionDb) GetCollections(id *string, name *string, topic *string, t floatValue sql.NullFloat64 ) - err := rows.Scan(&collectionID, &collectionName, &collectionTopic, &collectionDimension, &collectionDatabaseID, &databaseName, &databaseTenantID, &key, &strValue, &intValue, &floatValue) + err := rows.Scan(&collectionID, &logPosition, &version, &collectionName, &collectionTopic, &collectionDimension, &collectionDatabaseID, &databaseName, &databaseTenantID, &key, &strValue, &intValue, &floatValue) if err != nil { log.Error("scan collection failed", zap.Error(err)) return nil, err @@ -87,10 +99,12 @@ func (s *collectionDb) GetCollections(id *string, name *string, topic *string, t currentCollection = &dbmodel.CollectionAndMetadata{ Collection: &dbmodel.Collection{ - ID: collectionID, - Name: &collectionName, - Topic: &collectionTopic, - DatabaseID: collectionDatabaseID, + ID: collectionID, + Name: &collectionName, + Topic: &collectionTopic, + DatabaseID: collectionDatabaseID, + LogPosition: logPosition, + Version: version, }, CollectionMetadata: metadata, TenantID: databaseTenantID, @@ -182,6 
+196,33 @@ func generateCollectionUpdatesWithoutID(in *dbmodel.Collection) map[string]inter } func (s *collectionDb) Update(in *dbmodel.Collection) error { + log.Info("update collection", zap.Any("collection", in)) updates := generateCollectionUpdatesWithoutID(in) return s.db.Model(&dbmodel.Collection{}).Where("id = ?", in.ID).Updates(updates).Error } + +func (s *collectionDb) UpdateLogPositionAndVersion(collectionID string, logPosition int64, currentCollectionVersion int32) (int32, error) { + log.Info("update log position and version", zap.String("collectionID", collectionID), zap.Int64("logPosition", logPosition), zap.Int32("currentCollectionVersion", currentCollectionVersion)) + var collection dbmodel.Collection + err := s.db.Where("id = ?", collectionID).First(&collection).Error + if err != nil { + return 0, err + } + if collection.LogPosition > logPosition { + return 0, common.ErrCollectionLogPositionStale + } + if collection.Version > currentCollectionVersion { + return 0, common.ErrCollectionVersionStale + } + if collection.Version < currentCollectionVersion { + // this should not happen, potentially a bug + return 0, common.ErrCollectionVersionInvalid + } + + version := currentCollectionVersion + 1 + err = s.db.Model(&dbmodel.Collection{}).Where("id = ?", collectionID).Updates(map[string]interface{}{"log_position": logPosition, "version": version}).Error + if err != nil { + return 0, err + } + return version, nil +} diff --git a/go/pkg/metastore/db/dao/collection_test.go b/go/pkg/metastore/db/dao/collection_test.go index aa40eabf53a..8e86a6203b5 100644 --- a/go/pkg/metastore/db/dao/collection_test.go +++ b/go/pkg/metastore/db/dao/collection_test.go @@ -3,84 +3,137 @@ package dao import ( "github.com/chroma-core/chroma/go/pkg/metastore/db/dbcore" "github.com/pingcap/log" - "go.uber.org/zap" + "github.com/stretchr/testify/suite" "testing" - "github.com/chroma-core/chroma/go/pkg/common" "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" - "github.com/chroma-core/chroma/go/pkg/types" - "github.com/stretchr/testify/assert" - "gorm.io/driver/sqlite" "gorm.io/gorm" ) -func TestCollectionDb_GetCollections(t *testing.T) { - db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{}) - assert.NoError(t, err) - - err = db.AutoMigrate(&dbmodel.Tenant{}, &dbmodel.Database{}, &dbmodel.Collection{}, &dbmodel.CollectionMetadata{}) - databaseID := dbcore.CreateDefaultTenantAndDatabase(db) - - assert.NoError(t, err) - name := "test_name" - topic := "test_topic" - collection := &dbmodel.Collection{ - ID: types.NewUniqueID().String(), - Name: &name, - Topic: &topic, - DatabaseID: databaseID, +type CollectionDbTestSuite struct { + suite.Suite + db *gorm.DB + collectionDb *collectionDb + tenantName string + databaseName string + databaseId string +} + +func (suite *CollectionDbTestSuite) SetupSuite() { + log.Info("setup suite") + suite.db = dbcore.ConfigDatabaseForTesting() + suite.collectionDb = &collectionDb{ + db: suite.db, } - err = db.Create(collection).Error - assert.NoError(t, err) + suite.tenantName = "test_collection_tenant" + suite.databaseName = "test_collection_database" + DbId, err := CreateTestTenantAndDatabase(suite.db, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.databaseId = DbId +} + +func (suite *CollectionDbTestSuite) TearDownSuite() { + log.Info("teardown suite") + err := CleanUpTestDatabase(suite.db, suite.tenantName, suite.databaseName) + suite.NoError(err) + err = CleanUpTestTenant(suite.db, suite.tenantName) + suite.NoError(err) +} + +func (suite 
*CollectionDbTestSuite) TestCollectionDb_GetCollections() { + collectionName := "test_collection_get_collections" + collectionTopic := "test_collection_topic" + collectionID, err := CreateTestCollection(suite.db, collectionName, collectionTopic, 128, suite.databaseId) + suite.NoError(err) testKey := "test" testValue := "test" metadata := &dbmodel.CollectionMetadata{ - CollectionID: collection.ID, + CollectionID: collectionID, Key: &testKey, StrValue: &testValue, } - err = db.Create(metadata).Error - assert.NoError(t, err) - - collectionDb := &collectionDb{ - db: db, - } + err = suite.db.Create(metadata).Error + suite.NoError(err) - query := db.Table("collections").Select("collections.id") + query := suite.db.Table("collections").Select("collections.id").Where("collections.id = ?", collectionID) rows, err := query.Rows() - assert.NoError(t, err) + suite.NoError(err) for rows.Next() { - var collectionID string - err = rows.Scan(&collectionID) - assert.NoError(t, err) - log.Info("collectionID", zap.String("collectionID", collectionID)) + var scanedCollectionID string + err = rows.Scan(&scanedCollectionID) + suite.NoError(err) + suite.Equal(collectionID, scanedCollectionID) } - collections, err := collectionDb.GetCollections(nil, nil, nil, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(t, err) - assert.Len(t, collections, 1) - assert.Equal(t, collection.ID, collections[0].Collection.ID) - assert.Equal(t, collection.Name, collections[0].Collection.Name) - assert.Equal(t, collection.Topic, collections[0].Collection.Topic) - assert.Len(t, collections[0].CollectionMetadata, 1) - assert.Equal(t, metadata.Key, collections[0].CollectionMetadata[0].Key) - assert.Equal(t, metadata.StrValue, collections[0].CollectionMetadata[0].StrValue) + collections, err := suite.collectionDb.GetCollections(nil, nil, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Len(collections, 1) + suite.Equal(collectionID, collections[0].Collection.ID) + suite.Equal(collectionName, *collections[0].Collection.Name) + suite.Equal(collectionTopic, *collections[0].Collection.Topic) + suite.Len(collections[0].CollectionMetadata, 1) + suite.Equal(metadata.Key, collections[0].CollectionMetadata[0].Key) + suite.Equal(metadata.StrValue, collections[0].CollectionMetadata[0].StrValue) // Test when filtering by ID - collections, err = collectionDb.GetCollections(nil, nil, nil, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(t, err) - assert.Len(t, collections, 1) - assert.Equal(t, collection.ID, collections[0].Collection.ID) + collections, err = suite.collectionDb.GetCollections(nil, nil, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Len(collections, 1) + suite.Equal(collectionID, collections[0].Collection.ID) // Test when filtering by name - collections, err = collectionDb.GetCollections(nil, collection.Name, nil, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(t, err) - assert.Len(t, collections, 1) - assert.Equal(t, collection.ID, collections[0].Collection.ID) + collections, err = suite.collectionDb.GetCollections(nil, &collectionName, nil, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Len(collections, 1) + suite.Equal(collectionID, collections[0].Collection.ID) // Test when filtering by topic - collections, err = collectionDb.GetCollections(nil, nil, collection.Topic, common.DefaultTenant, common.DefaultDatabase) - assert.NoError(t, err) - assert.Len(t, collections, 1) - assert.Equal(t, collection.ID, 
collections[0].Collection.ID) + collections, err = suite.collectionDb.GetCollections(nil, nil, &collectionTopic, suite.tenantName, suite.databaseName) + suite.NoError(err) + suite.Len(collections, 1) + suite.Equal(collectionID, collections[0].Collection.ID) + + // clean up + err = CleanUpTestCollection(suite.db, collectionID) + suite.NoError(err) +} + +func (suite *CollectionDbTestSuite) TestCollectionDb_UpdateLogPositionAndVersion() { + collectionName := "test_collection_get_collections" + collectionTopic := "test_topic" + collectionID, err := CreateTestCollection(suite.db, collectionName, collectionTopic, 128, suite.databaseId) + // verify default values + collections, err := suite.collectionDb.GetCollections(&collectionID, nil, nil, "", "") + suite.NoError(err) + suite.Len(collections, 1) + suite.Equal(int64(0), collections[0].Collection.LogPosition) + suite.Equal(int32(0), collections[0].Collection.Version) + + // update log position and version + version, err := suite.collectionDb.UpdateLogPositionAndVersion(collectionID, int64(10), 0) + suite.NoError(err) + suite.Equal(int32(1), version) + collections, err = suite.collectionDb.GetCollections(&collectionID, nil, nil, "", "") + suite.Len(collections, 1) + suite.Equal(int64(10), collections[0].Collection.LogPosition) + suite.Equal(int32(1), collections[0].Collection.Version) + + // invalid log position + _, err = suite.collectionDb.UpdateLogPositionAndVersion(collectionID, int64(5), 0) + suite.Error(err, "collection log position Stale") + + // invalid version + _, err = suite.collectionDb.UpdateLogPositionAndVersion(collectionID, int64(20), 0) + suite.Error(err, "collection version invalid") + _, err = suite.collectionDb.UpdateLogPositionAndVersion(collectionID, int64(20), 3) + suite.Error(err, "collection version invalid") + + //clean up + err = CleanUpTestCollection(suite.db, collectionID) + suite.NoError(err) +} + +func TestCollectionDbTestSuiteSuite(t *testing.T) { + testSuite := new(CollectionDbTestSuite) + suite.Run(t, testSuite) } diff --git a/go/pkg/metastore/db/dao/database.go b/go/pkg/metastore/db/dao/database.go index 7ede1c5bc4f..fb7ffb07a12 100644 --- a/go/pkg/metastore/db/dao/database.go +++ b/go/pkg/metastore/db/dao/database.go @@ -5,6 +5,7 @@ import ( "github.com/pingcap/log" "go.uber.org/zap" "gorm.io/gorm" + "gorm.io/gorm/clause" ) type databaseDb struct { @@ -17,6 +18,12 @@ func (s *databaseDb) DeleteAll() error { return s.db.Where("1 = 1").Delete(&dbmodel.Database{}).Error } +func (s *databaseDb) DeleteByTenantIdAndName(tenantId string, databaseName string) (int, error) { + var databases []dbmodel.Database + err := s.db.Clauses(clause.Returning{}).Where("tenant_id = ?", tenantId).Where("name = ?", databaseName).Delete(&databases).Error + return len(databases), err +} + func (s *databaseDb) GetAllDatabases() ([]*dbmodel.Database, error) { var databases []*dbmodel.Database query := s.db.Table("databases") @@ -44,3 +51,16 @@ func (s *databaseDb) GetDatabases(tenantID string, databaseName string) ([]*dbmo func (s *databaseDb) Insert(database *dbmodel.Database) error { return s.db.Create(database).Error } + +func (s *databaseDb) GetDatabasesByTenantID(tenantID string) ([]*dbmodel.Database, error) { + var databases []*dbmodel.Database + query := s.db.Table("databases"). + Select("databases.id, databases.name, databases.tenant_id"). 
+ Where("databases.tenant_id = ?", tenantID) + + if err := query.Find(&databases).Error; err != nil { + log.Error("GetDatabasesByTenantID", zap.Error(err)) + return nil, err + } + return databases, nil +} diff --git a/go/pkg/metastore/db/dao/record_log_test.go b/go/pkg/metastore/db/dao/record_log_test.go index cb1a3ac6a0d..9edf8c149e4 100644 --- a/go/pkg/metastore/db/dao/record_log_test.go +++ b/go/pkg/metastore/db/dao/record_log_test.go @@ -6,7 +6,6 @@ import ( "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" "github.com/chroma-core/chroma/go/pkg/types" "github.com/pingcap/log" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" "gorm.io/gorm" "testing" @@ -16,7 +15,6 @@ type RecordLogDbTestSuite struct { suite.Suite db *gorm.DB Db *recordLogDb - t *testing.T collectionId1 types.UniqueID collectionId2 types.UniqueID records [][]byte @@ -28,21 +26,28 @@ func (suite *RecordLogDbTestSuite) SetupSuite() { suite.Db = &recordLogDb{ db: suite.db, } - suite.collectionId1 = types.NewUniqueID() - suite.collectionId2 = types.NewUniqueID() suite.records = make([][]byte, 0, 5) suite.records = append(suite.records, []byte("test1"), []byte("test2"), []byte("test3"), []byte("test4"), []byte("test5")) + recordLogTableExist := suite.db.Migrator().HasTable(&dbmodel.RecordLog{}) + if !recordLogTableExist { + err := suite.db.Migrator().CreateTable(&dbmodel.RecordLog{}) + suite.NoError(err) + } } func (suite *RecordLogDbTestSuite) SetupTest() { log.Info("setup test") - testutils.SetupTest(suite.db, suite.collectionId1, suite.collectionId2) + suite.collectionId1 = types.NewUniqueID() + suite.collectionId2 = types.NewUniqueID() + err := testutils.CreateCollections(suite.db, suite.collectionId1, suite.collectionId2) + suite.NoError(err) } func (suite *RecordLogDbTestSuite) TearDownTest() { log.Info("teardown test") - testutils.TearDownTest(suite.db) + err := testutils.CleanupCollections(suite.db, suite.collectionId1, suite.collectionId2) + suite.NoError(err) } func (suite *RecordLogDbTestSuite) TestRecordLogDb_PushLogs() { @@ -50,46 +55,46 @@ func (suite *RecordLogDbTestSuite) TestRecordLogDb_PushLogs() { // id: 0, // records: test1, test2, test3 count, err := suite.Db.PushLogs(suite.collectionId1, suite.records[:3]) - assert.NoError(suite.t, err) - assert.Equal(suite.t, 3, count) + suite.NoError(err) + suite.Equal(3, count) // verify logs are pushed var recordLogs []*dbmodel.RecordLog suite.db.Where("collection_id = ?", types.FromUniqueID(suite.collectionId1)).Find(&recordLogs) - assert.Len(suite.t, recordLogs, 3) + suite.Len(recordLogs, 3) for index := range recordLogs { - assert.Equal(suite.t, int64(index+1), recordLogs[index].ID) - assert.Equal(suite.t, suite.records[index], *recordLogs[index].Record) + suite.Equal(int64(index+1), recordLogs[index].ID) + suite.Equal(suite.records[index], *recordLogs[index].Record) } // run push logs in transaction // id: 1, // records: test4, test5 count, err = suite.Db.PushLogs(suite.collectionId1, suite.records[3:]) - assert.NoError(suite.t, err) - assert.Equal(suite.t, 2, count) + suite.NoError(err) + suite.Equal(2, count) // verify logs are pushed suite.db.Where("collection_id = ?", types.FromUniqueID(suite.collectionId1)).Find(&recordLogs) - assert.Len(suite.t, recordLogs, 5) + suite.Len(recordLogs, 5) for index := range recordLogs { - assert.Equal(suite.t, int64(index+1), recordLogs[index].ID, "id mismatch for index %d", index) - assert.Equal(suite.t, suite.records[index], *recordLogs[index].Record, "record mismatch for index %d", 
index) + suite.Equal(int64(index+1), recordLogs[index].ID) + suite.Equal(suite.records[index], *recordLogs[index].Record) } // run push logs in transaction // id: 0, // records: test1, test2, test3, test4, test5 count, err = suite.Db.PushLogs(suite.collectionId2, suite.records) - assert.NoError(suite.t, err) - assert.Equal(suite.t, 5, count) + suite.NoError(err) + suite.Equal(5, count) // verify logs are pushed suite.db.Where("collection_id = ?", types.FromUniqueID(suite.collectionId2)).Find(&recordLogs) - assert.Len(suite.t, recordLogs, 5) + suite.Len(recordLogs, 5) for index := range recordLogs { - assert.Equal(suite.t, int64(index+1), recordLogs[index].ID, "id mismatch for index %d", index) - assert.Equal(suite.t, suite.records[index], *recordLogs[index].Record, "record mismatch for index %d", index) + suite.Equal(int64(index+1), recordLogs[index].ID) + suite.Equal(suite.records[index], *recordLogs[index].Record) } } @@ -97,86 +102,85 @@ func (suite *RecordLogDbTestSuite) TestRecordLogDb_PullLogsFromID() { // pull empty logs var recordLogs []*dbmodel.RecordLog recordLogs, err := suite.Db.PullLogs(suite.collectionId1, 0, 3) - assert.NoError(suite.t, err) - assert.Len(suite.t, recordLogs, 0) + suite.NoError(err) + suite.Len(recordLogs, 0) // push some logs count, err := suite.Db.PushLogs(suite.collectionId1, suite.records[:3]) - assert.NoError(suite.t, err) - assert.Equal(suite.t, 3, count) + suite.NoError(err) + suite.Equal(3, count) count, err = suite.Db.PushLogs(suite.collectionId1, suite.records[3:]) - assert.NoError(suite.t, err) - assert.Equal(suite.t, 2, count) + suite.NoError(err) + suite.Equal(2, count) // pull logs from id 0 batch_size 3 recordLogs, err = suite.Db.PullLogs(suite.collectionId1, 0, 3) - assert.NoError(suite.t, err) - assert.Len(suite.t, recordLogs, 3) + suite.NoError(err) + suite.Len(recordLogs, 3) for index := range recordLogs { - assert.Equal(suite.t, int64(index+1), recordLogs[index].ID, "id mismatch for index %d", index) - assert.Equal(suite.t, suite.records[index], *recordLogs[index].Record, "record mismatch for index %d", index) + suite.Equal(int64(index+1), recordLogs[index].ID) + suite.Equal(suite.records[index], *recordLogs[index].Record) } // pull logs from id 0 batch_size 6 recordLogs, err = suite.Db.PullLogs(suite.collectionId1, 0, 6) - assert.NoError(suite.t, err) - assert.Len(suite.t, recordLogs, 5) + suite.NoError(err) + suite.Len(recordLogs, 5) for index := range recordLogs { - assert.Equal(suite.t, int64(index+1), recordLogs[index].ID, "id mismatch for index %d", index) - assert.Equal(suite.t, suite.records[index], *recordLogs[index].Record, "record mismatch for index %d", index) + suite.Equal(int64(index+1), recordLogs[index].ID) + suite.Equal(suite.records[index], *recordLogs[index].Record) } // pull logs from id 3 batch_size 4 recordLogs, err = suite.Db.PullLogs(suite.collectionId1, 3, 4) - assert.NoError(suite.t, err) - assert.Len(suite.t, recordLogs, 3) + suite.NoError(err) + suite.Len(recordLogs, 3) for index := range recordLogs { - assert.Equal(suite.t, int64(index+3), recordLogs[index].ID, "id mismatch for index %d", index) - assert.Equal(suite.t, suite.records[index+2], *recordLogs[index].Record, "record mismatch for index %d", index) + suite.Equal(int64(index+3), recordLogs[index].ID) + suite.Equal(suite.records[index+2], *recordLogs[index].Record) } } func (suite *RecordLogDbTestSuite) TestRecordLogDb_GetAllCollectionsToCompact() { // push some logs count, err := suite.Db.PushLogs(suite.collectionId1, suite.records) - 
assert.NoError(suite.t, err) - assert.Equal(suite.t, 5, count) + suite.NoError(err) + suite.Equal(5, count) // get all collection ids to compact collectionInfos, err := suite.Db.GetAllCollectionsToCompact() - assert.NoError(suite.t, err) - assert.Len(suite.t, collectionInfos, 1) - assert.Equal(suite.t, suite.collectionId1.String(), *collectionInfos[0].CollectionID) - assert.Equal(suite.t, int64(1), collectionInfos[0].ID) + suite.NoError(err) + suite.Len(collectionInfos, 1) + suite.Equal(suite.collectionId1.String(), *collectionInfos[0].CollectionID) + suite.Equal(int64(1), collectionInfos[0].ID) // move log position testutils.MoveLogPosition(suite.db, suite.collectionId1, 2) // get all collection ids to compact collectionInfos, err = suite.Db.GetAllCollectionsToCompact() - assert.NoError(suite.t, err) - assert.Len(suite.t, collectionInfos, 1) - assert.Equal(suite.t, suite.collectionId1.String(), *collectionInfos[0].CollectionID) - assert.Equal(suite.t, int64(3), collectionInfos[0].ID) + suite.NoError(err) + suite.Len(collectionInfos, 1) + suite.Equal(suite.collectionId1.String(), *collectionInfos[0].CollectionID) + suite.Equal(int64(3), collectionInfos[0].ID) // push some logs count, err = suite.Db.PushLogs(suite.collectionId2, suite.records) - assert.NoError(suite.t, err) - assert.Equal(suite.t, 5, count) + suite.NoError(err) + suite.Equal(5, count) // get all collection ids to compact collectionInfos, err = suite.Db.GetAllCollectionsToCompact() - assert.NoError(suite.t, err) - assert.Len(suite.t, collectionInfos, 2) - assert.Equal(suite.t, suite.collectionId1.String(), *collectionInfos[0].CollectionID) - assert.Equal(suite.t, int64(3), collectionInfos[0].ID) - assert.Equal(suite.t, suite.collectionId2.String(), *collectionInfos[1].CollectionID) - assert.Equal(suite.t, int64(1), collectionInfos[1].ID) + suite.NoError(err) + suite.Len(collectionInfos, 2) + suite.Equal(suite.collectionId1.String(), *collectionInfos[0].CollectionID) + suite.Equal(int64(3), collectionInfos[0].ID) + suite.Equal(suite.collectionId2.String(), *collectionInfos[1].CollectionID) + suite.Equal(int64(1), collectionInfos[1].ID) } func TestRecordLogDbTestSuite(t *testing.T) { testSuite := new(RecordLogDbTestSuite) - testSuite.t = t suite.Run(t, testSuite) } diff --git a/go/pkg/metastore/db/dao/segment.go b/go/pkg/metastore/db/dao/segment.go index 57701aa8066..a69cd13ce6a 100644 --- a/go/pkg/metastore/db/dao/segment.go +++ b/go/pkg/metastore/db/dao/segment.go @@ -2,8 +2,10 @@ package dao import ( "database/sql" + "encoding/json" "errors" "github.com/chroma-core/chroma/go/pkg/common" + "github.com/chroma-core/chroma/go/pkg/model" "github.com/jackc/pgx/v5/pgconn" "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" @@ -53,7 +55,7 @@ func (s *segmentDb) GetSegments(id types.UniqueID, segmentType *string, scope *s var segments []*dbmodel.SegmentAndMetadata query := s.db.Table("segments"). - Select("segments.id, segments.collection_id, segments.type, segments.scope, segments.topic, segment_metadata.key, segment_metadata.str_value, segment_metadata.int_value, segment_metadata.float_value"). + Select("segments.id, segments.collection_id, segments.type, segments.scope, segments.topic, segments.file_paths, segment_metadata.key, segment_metadata.str_value, segment_metadata.int_value, segment_metadata.float_value"). Joins("LEFT JOIN segment_metadata ON segments.id = segment_metadata.segment_id"). 
Order("segments.id") @@ -86,18 +88,19 @@ func (s *segmentDb) GetSegments(id types.UniqueID, segmentType *string, scope *s for rows.Next() { var ( - segmentID string - collectionID sql.NullString - segmentType string - scope string - topic sql.NullString - key sql.NullString - strValue sql.NullString - intValue sql.NullInt64 - floatValue sql.NullFloat64 + segmentID string + collectionID sql.NullString + segmentType string + scope string + topic sql.NullString + filePathsJson string + key sql.NullString + strValue sql.NullString + intValue sql.NullInt64 + floatValue sql.NullFloat64 ) - err := rows.Scan(&segmentID, &collectionID, &segmentType, &scope, &topic, &key, &strValue, &intValue, &floatValue) + err := rows.Scan(&segmentID, &collectionID, &segmentType, &scope, &topic, &filePathsJson, &key, &strValue, &intValue, &floatValue) if err != nil { log.Error("scan segment failed", zap.Error(err)) } @@ -105,11 +108,17 @@ func (s *segmentDb) GetSegments(id types.UniqueID, segmentType *string, scope *s currentSegmentID = segmentID metadata = nil + var filePaths map[string][]string + err := json.Unmarshal([]byte(filePathsJson), &filePaths) + if err != nil { + return nil, err + } currentSegment = &dbmodel.SegmentAndMetadata{ Segment: &dbmodel.Segment{ - ID: segmentID, - Type: segmentType, - Scope: scope, + ID: segmentID, + Type: segmentType, + Scope: scope, + FilePaths: filePaths, }, SegmentMetadata: metadata, } @@ -201,3 +210,22 @@ func (s *segmentDb) Update(in *dbmodel.UpdateSegment) error { Where("collection_id = ?", &in.Collection). Where("id = ?", in.ID).Updates(updates).Error } + +func (s *segmentDb) RegisterFilePaths(flushSegmentCompactions []*model.FlushSegmentCompaction) error { + log.Info("register file paths", zap.Any("flushSegmentCompactions", flushSegmentCompactions)) + for _, flushSegmentCompaction := range flushSegmentCompactions { + filePaths, err := json.Marshal(flushSegmentCompaction.FilePaths) + if err != nil { + log.Error("marshal file paths failed", zap.Error(err)) + return err + } + err = s.db.Model(&dbmodel.Segment{}). + Where("id = ?", flushSegmentCompaction.ID). 
+ Update("file_paths", filePaths).Error + if err != nil { + log.Error("register file path failed", zap.Error(err)) + return err + } + } + return nil +} diff --git a/go/pkg/metastore/db/dao/segment_test.go b/go/pkg/metastore/db/dao/segment_test.go index 3eb527b1da7..7712ccf0bed 100644 --- a/go/pkg/metastore/db/dao/segment_test.go +++ b/go/pkg/metastore/db/dao/segment_test.go @@ -1,25 +1,37 @@ package dao import ( + "github.com/chroma-core/chroma/go/pkg/metastore/db/dbcore" + "github.com/chroma-core/chroma/go/pkg/model" + "github.com/pingcap/log" + "github.com/stretchr/testify/suite" + "k8s.io/apimachinery/pkg/util/rand" + "strconv" "testing" "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" "github.com/chroma-core/chroma/go/pkg/types" - "github.com/stretchr/testify/assert" - "gorm.io/driver/sqlite" "gorm.io/gorm" ) -func TestSegmentDb_GetSegments(t *testing.T) { - db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{}) - assert.NoError(t, err) +type SegmentDbTestSuite struct { + suite.Suite + db *gorm.DB + segmentDb *segmentDb +} - err = db.AutoMigrate(&dbmodel.Segment{}, &dbmodel.SegmentMetadata{}) - assert.NoError(t, err) +func (suite *SegmentDbTestSuite) SetupSuite() { + log.Info("setup suite") + suite.db = dbcore.ConfigDatabaseForTesting() + suite.segmentDb = &segmentDb{ + db: suite.db, + } +} +func (suite *SegmentDbTestSuite) TestSegmentDb_GetSegments() { uniqueID := types.NewUniqueID() collectionID := uniqueID.String() - testTopic := "test_topic" + testTopic := "test_segment_topic" segment := &dbmodel.Segment{ ID: uniqueID.String(), CollectionID: &collectionID, @@ -27,8 +39,8 @@ func TestSegmentDb_GetSegments(t *testing.T) { Scope: "test_scope", Topic: &testTopic, } - err = db.Create(segment).Error - assert.NoError(t, err) + err := suite.db.Create(segment).Error + suite.NoError(err) testKey := "test" testValue := "test" @@ -37,53 +49,110 @@ func TestSegmentDb_GetSegments(t *testing.T) { Key: &testKey, StrValue: &testValue, } - err = db.Create(metadata).Error - assert.NoError(t, err) - - segmentDb := &segmentDb{ - db: db, - } + err = suite.db.Create(metadata).Error + suite.NoError(err) // Test when all parameters are nil - segments, err := segmentDb.GetSegments(types.NilUniqueID(), nil, nil, nil, types.NilUniqueID()) - assert.NoError(t, err) - assert.Len(t, segments, 1) - assert.Equal(t, segment.ID, segments[0].Segment.ID) - assert.Equal(t, segment.CollectionID, segments[0].Segment.CollectionID) - assert.Equal(t, segment.Type, segments[0].Segment.Type) - assert.Equal(t, segment.Scope, segments[0].Segment.Scope) - assert.Equal(t, segment.Topic, segments[0].Segment.Topic) - assert.Len(t, segments[0].SegmentMetadata, 1) - assert.Equal(t, metadata.Key, segments[0].SegmentMetadata[0].Key) - assert.Equal(t, metadata.StrValue, segments[0].SegmentMetadata[0].StrValue) + segments, err := suite.segmentDb.GetSegments(types.NilUniqueID(), nil, nil, nil, types.NilUniqueID()) + suite.NoError(err) + suite.Len(segments, 1) + suite.Equal(segment.ID, segments[0].Segment.ID) + suite.Equal(segment.CollectionID, segments[0].Segment.CollectionID) + suite.Equal(segment.Type, segments[0].Segment.Type) + suite.Equal(segment.Scope, segments[0].Segment.Scope) + suite.Equal(segment.Topic, segments[0].Segment.Topic) + suite.Len(segments[0].SegmentMetadata, 1) + suite.Equal(metadata.Key, segments[0].SegmentMetadata[0].Key) + suite.Equal(metadata.StrValue, segments[0].SegmentMetadata[0].StrValue) // Test when filtering by ID - segments, err = segmentDb.GetSegments(types.MustParse(segment.ID), nil, 
nil, nil, types.NilUniqueID()) - assert.NoError(t, err) - assert.Len(t, segments, 1) - assert.Equal(t, segment.ID, segments[0].Segment.ID) + segments, err = suite.segmentDb.GetSegments(types.MustParse(segment.ID), nil, nil, nil, types.NilUniqueID()) + suite.NoError(err) + suite.Len(segments, 1) + suite.Equal(segment.ID, segments[0].Segment.ID) // Test when filtering by type - segments, err = segmentDb.GetSegments(types.NilUniqueID(), &segment.Type, nil, nil, types.NilUniqueID()) - assert.NoError(t, err) - assert.Len(t, segments, 1) - assert.Equal(t, segment.ID, segments[0].Segment.ID) + segments, err = suite.segmentDb.GetSegments(types.NilUniqueID(), &segment.Type, nil, nil, types.NilUniqueID()) + suite.NoError(err) + suite.Len(segments, 1) + suite.Equal(segment.ID, segments[0].Segment.ID) // Test when filtering by scope - segments, err = segmentDb.GetSegments(types.NilUniqueID(), nil, &segment.Scope, nil, types.NilUniqueID()) - assert.NoError(t, err) - assert.Len(t, segments, 1) - assert.Equal(t, segment.ID, segments[0].Segment.ID) + segments, err = suite.segmentDb.GetSegments(types.NilUniqueID(), nil, &segment.Scope, nil, types.NilUniqueID()) + suite.NoError(err) + suite.Len(segments, 1) + suite.Equal(segment.ID, segments[0].Segment.ID) // Test when filtering by topic - segments, err = segmentDb.GetSegments(types.NilUniqueID(), nil, nil, segment.Topic, types.NilUniqueID()) - assert.NoError(t, err) - assert.Len(t, segments, 1) - assert.Equal(t, segment.ID, segments[0].Segment.ID) + segments, err = suite.segmentDb.GetSegments(types.NilUniqueID(), nil, nil, segment.Topic, types.NilUniqueID()) + suite.NoError(err) + suite.Len(segments, 1) + suite.Equal(segment.ID, segments[0].Segment.ID) // Test when filtering by collection ID - segments, err = segmentDb.GetSegments(types.NilUniqueID(), nil, nil, nil, types.MustParse(*segment.CollectionID)) - assert.NoError(t, err) - assert.Len(t, segments, 1) - assert.Equal(t, segment.ID, segments[0].Segment.ID) + segments, err = suite.segmentDb.GetSegments(types.NilUniqueID(), nil, nil, nil, types.MustParse(*segment.CollectionID)) + suite.NoError(err) + suite.Len(segments, 1) + suite.Equal(segment.ID, segments[0].Segment.ID) + + // clean up + err = suite.db.Delete(segment).Error + suite.NoError(err) + err = suite.db.Delete(metadata).Error + suite.NoError(err) +} + +func (suite *SegmentDbTestSuite) TestSegmentDb_RegisterFilePath() { + // create a collection for testing + databaseId := types.NewUniqueID().String() + collectionName := "test_segment_register_file_paths" + collectionID, err := CreateTestCollection(suite.db, collectionName, "test_topic", 128, databaseId) + suite.NoError(err) + + segments, err := suite.segmentDb.GetSegments(types.NilUniqueID(), nil, nil, nil, types.MustParse(collectionID)) + suite.NoError(err) + + // create entries to flush + segmentsFilePaths := make(map[string]map[string][]string) + flushSegmentCompactions := make([]*model.FlushSegmentCompaction, 0) + testFilePathTypes := []string{"TypeA", "TypeB", "TypeC", "TypeD"} + for _, segment := range segments { + segmentID := segment.Segment.ID + segmentsFilePaths[segmentID] = make(map[string][]string) + for i := 0; i < rand.Intn(len(testFilePathTypes)); i++ { + filePaths := make([]string, 0) + for j := 0; j < rand.Intn(5); j++ { + filePaths = append(filePaths, "test_file_path_"+strconv.Itoa(j+1)) + } + filePathTypeI := rand.Intn(len(testFilePathTypes)) + filePathType := testFilePathTypes[filePathTypeI] + segmentsFilePaths[segmentID][filePathType] = filePaths + } + 
flushSegmentCompaction := &model.FlushSegmentCompaction{ + ID: types.MustParse(segmentID), + FilePaths: segmentsFilePaths[segmentID], + } + flushSegmentCompactions = append(flushSegmentCompactions, flushSegmentCompaction) + } + + // flush the entries + err = suite.segmentDb.RegisterFilePaths(flushSegmentCompactions) + suite.NoError(err) + + // verify file paths registered + segments, err = suite.segmentDb.GetSegments(types.NilUniqueID(), nil, nil, nil, types.MustParse(collectionID)) + suite.NoError(err) + for _, segment := range segments { + suite.Contains(segmentsFilePaths, segment.Segment.ID) + suite.Equal(segmentsFilePaths[segment.Segment.ID], segment.Segment.FilePaths) + } + + // clean up + err = CleanUpTestCollection(suite.db, collectionID) + suite.NoError(err) +} + +func TestSegmentDbTestSuiteSuite(t *testing.T) { + testSuite := new(SegmentDbTestSuite) + suite.Run(t, testSuite) } diff --git a/go/pkg/metastore/db/dao/tenant.go b/go/pkg/metastore/db/dao/tenant.go index adc79c06dfa..fcd73f2cdcb 100644 --- a/go/pkg/metastore/db/dao/tenant.go +++ b/go/pkg/metastore/db/dao/tenant.go @@ -21,6 +21,12 @@ func (s *tenantDb) DeleteAll() error { return s.db.Where("1 = 1").Delete(&dbmodel.Tenant{}).Error } +func (s *tenantDb) DeleteByID(tenantID string) (int, error) { + var tenants []dbmodel.Tenant + err := s.db.Clauses(clause.Returning{}).Where("id = ?", tenantID).Delete(&tenants).Error + return len(tenants), err +} + func (s *tenantDb) GetAllTenants() ([]*dbmodel.Tenant, error) { var tenants []*dbmodel.Tenant @@ -61,6 +67,7 @@ func (s *tenantDb) Insert(tenant *dbmodel.Tenant) error { } func (s *tenantDb) UpdateTenantLastCompactionTime(tenantID string, lastCompactionTime int64) error { + log.Info("UpdateTenantLastCompactionTime", zap.String("tenantID", tenantID), zap.Int64("lastCompactionTime", lastCompactionTime)) var tenants []dbmodel.Tenant result := s.db.Model(&tenants). Clauses(clause.Returning{Columns: []clause.Column{{Name: "id"}}}). 
@@ -78,6 +85,7 @@ func (s *tenantDb) UpdateTenantLastCompactionTime(tenantID string, lastCompactio } func (s *tenantDb) GetTenantsLastCompactionTime(tenantIDs []string) ([]*dbmodel.Tenant, error) { + log.Info("GetTenantsLastCompactionTime", zap.Any("tenantIDs", tenantIDs)) var tenants []*dbmodel.Tenant result := s.db.Select("id", "last_compaction_time").Find(&tenants, "id IN ?", tenantIDs) diff --git a/go/pkg/metastore/db/dao/tenant_test.go b/go/pkg/metastore/db/dao/tenant_test.go index 5f4e658928a..7bb613ae7df 100644 --- a/go/pkg/metastore/db/dao/tenant_test.go +++ b/go/pkg/metastore/db/dao/tenant_test.go @@ -21,7 +21,6 @@ type TenantDbTestSuite struct { func (suite *TenantDbTestSuite) SetupSuite() { log.Info("setup suite") suite.db = dbcore.ConfigDatabaseForTesting() - dbcore.ResetTestTables(suite.db) suite.Db = &tenantDb{ db: suite.db, } @@ -38,14 +37,15 @@ func (suite *TenantDbTestSuite) TearDownTest() { func (suite *TenantDbTestSuite) TestTenantDb_UpdateTenantLastCompactionTime() { tenantId := "testUpdateTenantLastCompactionTime" var tenant dbmodel.Tenant - suite.Db.Insert(&dbmodel.Tenant{ + err := suite.Db.Insert(&dbmodel.Tenant{ ID: tenantId, LastCompactionTime: 0, }) + suite.Require().NoError(err) suite.db.First(&tenant, "id = ?", tenantId) suite.Require().Equal(int64(0), tenant.LastCompactionTime) - err := suite.Db.UpdateTenantLastCompactionTime(tenantId, 1) + err = suite.Db.UpdateTenantLastCompactionTime(tenantId, 1) suite.Require().NoError(err) suite.db.First(&tenant, "id = ?", tenantId) suite.Require().Equal(int64(1), tenant.LastCompactionTime) @@ -63,10 +63,11 @@ func (suite *TenantDbTestSuite) TestTenantDb_GetTenantsLastCompactionTime() { tenantIds := make([]string, 0) for i := 0; i < 10; i++ { tenantId := "testGetTenantsLastCompactionTime" + strconv.Itoa(i) - suite.Db.Insert(&dbmodel.Tenant{ + err := suite.Db.Insert(&dbmodel.Tenant{ ID: tenantId, LastCompactionTime: int64(i), }) + suite.Require().NoError(err) tenantIds = append(tenantIds, tenantId) } diff --git a/go/pkg/metastore/db/dao/test_utils.go b/go/pkg/metastore/db/dao/test_utils.go new file mode 100644 index 00000000000..6ae3293d1c1 --- /dev/null +++ b/go/pkg/metastore/db/dao/test_utils.go @@ -0,0 +1,184 @@ +package dao + +import ( + "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" + "github.com/chroma-core/chroma/go/pkg/types" + "github.com/pingcap/log" + "go.uber.org/zap" + "gorm.io/gorm" + "time" +) + +const SegmentType = "urn:chroma:segment/vector/hnsw-distributed" + +func GetSegmentScopes() []string { + return []string{"VECTOR", "METADATA"} +} + +func CreateTestTenantAndDatabase(db *gorm.DB, tenant string, database string) (string, error) { + log.Info("create test tenant and database", zap.String("tenant", tenant), zap.String("database", database)) + tenantDb := &tenantDb{ + db: db, + } + databaseDb := &databaseDb{ + db: db, + } + + err := tenantDb.Insert(&dbmodel.Tenant{ + ID: tenant, + LastCompactionTime: time.Now().Unix(), + }) + if err != nil { + return "", err + } + + databaseId := types.NewUniqueID().String() + err = databaseDb.Insert(&dbmodel.Database{ + ID: databaseId, + Name: database, + TenantID: tenant, + }) + if err != nil { + return "", err + } + + return databaseId, nil +} + +func CleanUpTestDatabase(db *gorm.DB, tenantName string, databaseName string) error { + log.Info("clean up test database", zap.String("tenantName", tenantName), zap.String("databaseName", databaseName)) + // clean up collections + collectionDb := &collectionDb{ + db: db, + } + collections, err := 
collectionDb.GetCollections(nil, nil, nil, tenantName, databaseName) + log.Info("clean up test database", zap.Int("collections", len(collections))) + if err != nil { + return err + } + for _, collection := range collections { + err = CleanUpTestCollection(db, collection.Collection.ID) + if err != nil { + return err + } + } + + // clean up database + databaseDb := &databaseDb{ + db: db, + } + + _, err = databaseDb.DeleteByTenantIdAndName(tenantName, databaseName) + if err != nil { + return err + } + + return nil +} + +func CleanUpTestTenant(db *gorm.DB, tenantName string) error { + log.Info("clean up test tenant", zap.String("tenantName", tenantName)) + tenantDb := &tenantDb{ + db: db, + } + databaseDb := &databaseDb{ + db: db, + } + + // clean up databases + databases, err := databaseDb.GetDatabasesByTenantID(tenantName) + if err != nil { + return err + } + for _, database := range databases { + err = CleanUpTestDatabase(db, tenantName, database.Name) + if err != nil { + return err + } + } + + // clean up tenant + _, err = tenantDb.DeleteByID(tenantName) + if err != nil { + return err + } + return nil +} + +func CreateTestCollection(db *gorm.DB, collectionName string, topic string, dimension int32, databaseID string) (string, error) { + log.Info("create test collection", zap.String("collectionName", collectionName), zap.String("topic", topic), zap.Int32("dimension", dimension), zap.String("databaseID", databaseID)) + collectionDb := &collectionDb{ + db: db, + } + segmentDb := &segmentDb{ + db: db, + } + collectionId := types.NewUniqueID().String() + + err := collectionDb.Insert(&dbmodel.Collection{ + ID: collectionId, + Name: &collectionName, + Topic: &topic, + Dimension: &dimension, + DatabaseID: databaseID, + }) + if err != nil { + return "", err + } + + for _, scope := range GetSegmentScopes() { + segmentId := types.NewUniqueID().String() + err = segmentDb.Insert(&dbmodel.Segment{ + CollectionID: &collectionId, + ID: segmentId, + Type: SegmentType, + Scope: scope, + }) + if err != nil { + return "", err + } + } + + return collectionId, nil +} + +func CleanUpTestCollection(db *gorm.DB, collectionId string) error { + log.Info("clean up collection", zap.String("collectionId", collectionId)) + collectionDb := &collectionDb{ + db: db, + } + collectionMetadataDb := &collectionMetadataDb{ + db: db, + } + segmentDb := &segmentDb{ + db: db, + } + segmentMetadataDb := &segmentMetadataDb{ + db: db, + } + + _, err := collectionMetadataDb.DeleteByCollectionID(collectionId) + if err != nil { + return err + } + _, err = collectionDb.DeleteCollectionByID(collectionId) + if err != nil { + return err + } + segments, err := segmentDb.GetSegments(types.NilUniqueID(), nil, nil, nil, types.MustParse(collectionId)) + if err != nil { + return err + } + for _, segment := range segments { + err = segmentDb.DeleteSegmentByID(segment.Segment.ID) + if err != nil { + return err + } + err = segmentMetadataDb.DeleteBySegmentID(segment.Segment.ID) + if err != nil { + return err + } + } + + return nil +} diff --git a/go/pkg/metastore/db/dbcore/core.go b/go/pkg/metastore/db/dbcore/core.go index 215b3375725..83b47338ae7 100644 --- a/go/pkg/metastore/db/dbcore/core.go +++ b/go/pkg/metastore/db/dbcore/core.go @@ -118,28 +118,69 @@ func GetDB(ctx context.Context) *gorm.DB { return globalDB.WithContext(ctx) } -func ResetTestTables(db *gorm.DB) { - db.Exec("TRUNCATE TABLE tenants, databases, collection_metadata, collections, segment_metadata, segments, notifications") - CreateDefaultTenantAndDatabase(db) -} - func 
CreateDefaultTenantAndDatabase(db *gorm.DB) string { - db.Model(&dbmodel.Tenant{}).Create(&dbmodel.Tenant{ + defaultTenant := &dbmodel.Tenant{ ID: common.DefaultTenant, LastCompactionTime: time.Now().Unix(), - }) - databaseId := types.NilUniqueID().String() - db.Model(&dbmodel.Database{}).Create(&dbmodel.Database{ - ID: databaseId, - Name: common.DefaultDatabase, - TenantID: common.DefaultTenant, - }) - return databaseId + } + db.Model(&dbmodel.Tenant{}).Where("id = ?", common.DefaultTenant).Save(defaultTenant) + + var database []dbmodel.Database + databaseId := types.NewUniqueID().String() + result := db.Model(&dbmodel.Database{}). + Where("name = ?", common.DefaultDatabase). + Where("tenant_id = ?", common.DefaultTenant). + Find(&database) + if result.Error != nil { + return "" + } + + if result.RowsAffected == 0 { + db.Create(&dbmodel.Database{ + ID: databaseId, + Name: common.DefaultDatabase, + TenantID: common.DefaultTenant, + }) + return databaseId + } + + err := result.Row().Scan(&database) + if err != nil { + return "" + } + return database[0].ID } func CreateTestTables(db *gorm.DB) { log.Info("CreateTestTables") - db.AutoMigrate(&dbmodel.Tenant{}, &dbmodel.Database{}, &dbmodel.CollectionMetadata{}, &dbmodel.Collection{}, &dbmodel.SegmentMetadata{}, &dbmodel.Segment{}, &dbmodel.Notification{}) + tableExist := db.Migrator().HasTable(&dbmodel.Tenant{}) + if !tableExist { + db.Migrator().CreateTable(&dbmodel.Tenant{}) + } + tableExist = db.Migrator().HasTable(&dbmodel.Database{}) + if !tableExist { + db.Migrator().CreateTable(&dbmodel.Database{}) + } + tableExist = db.Migrator().HasTable(&dbmodel.CollectionMetadata{}) + if !tableExist { + db.Migrator().CreateTable(&dbmodel.CollectionMetadata{}) + } + tableExist = db.Migrator().HasTable(&dbmodel.Collection{}) + if !tableExist { + db.Migrator().CreateTable(&dbmodel.Collection{}) + } + tableExist = db.Migrator().HasTable(&dbmodel.SegmentMetadata{}) + if !tableExist { + db.Migrator().CreateTable(&dbmodel.SegmentMetadata{}) + } + tableExist = db.Migrator().HasTable(&dbmodel.Segment{}) + if !tableExist { + db.Migrator().CreateTable(&dbmodel.Segment{}) + } + tableExist = db.Migrator().HasTable(&dbmodel.Notification{}) + if !tableExist { + db.Migrator().CreateTable(&dbmodel.Notification{}) + } // create default tenant and database CreateDefaultTenantAndDatabase(db) diff --git a/go/pkg/metastore/db/dbmodel/collection.go b/go/pkg/metastore/db/dbmodel/collection.go index 4c6af65483c..30a9ab945ac 100644 --- a/go/pkg/metastore/db/dbmodel/collection.go +++ b/go/pkg/metastore/db/dbmodel/collection.go @@ -17,6 +17,7 @@ type Collection struct { CreatedAt time.Time `gorm:"created_at;type:timestamp;not null;default:current_timestamp"` UpdatedAt time.Time `gorm:"updated_at;type:timestamp;not null;default:current_timestamp"` LogPosition int64 `gorm:"log_position;default:0"` + Version int32 `gorm:"version;default:0"` } func (v Collection) TableName() string { @@ -37,4 +38,5 @@ type ICollectionDb interface { Insert(in *Collection) error Update(in *Collection) error DeleteAll() error + UpdateLogPositionAndVersion(collectionID string, logPosition int64, currentCollectionVersion int32) (int32, error) } diff --git a/go/pkg/metastore/db/dbmodel/mocks/ICollectionDb.go b/go/pkg/metastore/db/dbmodel/mocks/ICollectionDb.go index 1a07397926c..b819b0b1889 100644 --- a/go/pkg/metastore/db/dbmodel/mocks/ICollectionDb.go +++ b/go/pkg/metastore/db/dbmodel/mocks/ICollectionDb.go @@ -124,6 +124,34 @@ func (_m *ICollectionDb) Update(in *dbmodel.Collection) error { return r0 
} +// UpdateLogPositionAndVersion provides a mock function with given fields: collectionID, logPosition, currentCollectionVersion +func (_m *ICollectionDb) UpdateLogPositionAndVersion(collectionID string, logPosition int64, currentCollectionVersion int32) (int32, error) { + ret := _m.Called(collectionID, logPosition, currentCollectionVersion) + + if len(ret) == 0 { + panic("no return value specified for UpdateLogPositionAndVersion") + } + + var r0 int32 + var r1 error + if rf, ok := ret.Get(0).(func(string, int64, int32) (int32, error)); ok { + return rf(collectionID, logPosition, currentCollectionVersion) + } + if rf, ok := ret.Get(0).(func(string, int64, int32) int32); ok { + r0 = rf(collectionID, logPosition, currentCollectionVersion) + } else { + r0 = ret.Get(0).(int32) + } + + if rf, ok := ret.Get(1).(func(string, int64, int32) error); ok { + r1 = rf(collectionID, logPosition, currentCollectionVersion) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // NewICollectionDb creates a new instance of ICollectionDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewICollectionDb(t interface { diff --git a/go/pkg/metastore/db/dbmodel/segment.go b/go/pkg/metastore/db/dbmodel/segment.go index 0285f32791a..14eaf19ca4c 100644 --- a/go/pkg/metastore/db/dbmodel/segment.go +++ b/go/pkg/metastore/db/dbmodel/segment.go @@ -1,6 +1,7 @@ package dbmodel import ( + "github.com/chroma-core/chroma/go/pkg/model" "time" "github.com/chroma-core/chroma/go/pkg/types" @@ -11,15 +12,16 @@ type Segment struct { This requires us to push down CollectionID from the caller. We don't think there is need to modify CollectionID in the near future. Each Segment should always have a collection as a parent and cannot be modified. 
*/ - CollectionID *string `gorm:"collection_id;primaryKey"` - ID string `gorm:"id;primaryKey"` - Type string `gorm:"type;type:string;not null"` - Scope string `gorm:"scope"` - Topic *string `gorm:"topic"` - Ts types.Timestamp `gorm:"ts;type:bigint;default:0"` - IsDeleted bool `gorm:"is_deleted;type:bool;default:false"` - CreatedAt time.Time `gorm:"created_at;type:timestamp;not null;default:current_timestamp"` - UpdatedAt time.Time `gorm:"updated_at;type:timestamp;not null;default:current_timestamp"` + CollectionID *string `gorm:"collection_id;primaryKey"` + ID string `gorm:"id;primaryKey"` + Type string `gorm:"type;type:string;not null"` + Scope string `gorm:"scope"` + Topic *string `gorm:"topic"` + Ts types.Timestamp `gorm:"ts;type:bigint;default:0"` + IsDeleted bool `gorm:"is_deleted;type:bool;default:false"` + CreatedAt time.Time `gorm:"created_at;type:timestamp;not null;default:current_timestamp"` + UpdatedAt time.Time `gorm:"updated_at;type:timestamp;not null;default:current_timestamp"` + FilePaths map[string][]string `gorm:"file_paths;serializer:json;default:'{}'"` } func (s Segment) TableName() string { @@ -46,4 +48,5 @@ type ISegmentDb interface { Insert(*Segment) error Update(*UpdateSegment) error DeleteAll() error + RegisterFilePaths(flushSegmentCompactions []*model.FlushSegmentCompaction) error } diff --git a/go/pkg/model/collection.go b/go/pkg/model/collection.go index 240d81fa8a2..1340c44df5b 100644 --- a/go/pkg/model/collection.go +++ b/go/pkg/model/collection.go @@ -13,6 +13,8 @@ type Collection struct { TenantID string DatabaseName string Ts types.Timestamp + LogPosition int64 + Version int32 } type CreateCollection struct { @@ -46,6 +48,20 @@ type UpdateCollection struct { Ts types.Timestamp } +type FlushCollectionCompaction struct { + ID types.UniqueID + TenantID string + LogPosition int64 + CurrentCollectionVersion int32 + FlushSegmentCompactions []*FlushSegmentCompaction +} + +type FlushCollectionInfo struct { + ID string + CollectionVersion int32 + TenantLastCompactionTime int64 +} + func FilterCollection(collection *Collection, collectionID types.UniqueID, collectionName *string, collectionTopic *string) bool { if collectionID != types.NilUniqueID() && collectionID != collection.ID { return false diff --git a/go/pkg/model/segment.go b/go/pkg/model/segment.go index 3127f515aaa..07030e77c91 100644 --- a/go/pkg/model/segment.go +++ b/go/pkg/model/segment.go @@ -12,6 +12,7 @@ type Segment struct { CollectionID types.UniqueID Metadata *SegmentMetadata[SegmentMetadataValueType] Ts types.Timestamp + FilePaths map[string][]string } type CreateSegment struct { @@ -43,6 +44,11 @@ type GetSegments struct { CollectionID types.UniqueID } +type FlushSegmentCompaction struct { + ID types.UniqueID + FilePaths map[string][]string +} + func FilterSegments(segment *Segment, segmentID types.UniqueID, segmentType *string, scope *string, topic *string, collectionID types.UniqueID) bool { if segmentID != types.NilUniqueID() && segment.ID != segmentID { return false diff --git a/go/pkg/proto/coordinatorpb/chroma.pb.go b/go/pkg/proto/coordinatorpb/chroma.pb.go index 49b077e803a..208d297e1c3 100644 --- a/go/pkg/proto/coordinatorpb/chroma.pb.go +++ b/go/pkg/proto/coordinatorpb/chroma.pb.go @@ -282,6 +282,53 @@ func (x *Vector) GetEncoding() ScalarEncoding { return ScalarEncoding_FLOAT32 } +type FilePaths struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Paths []string `protobuf:"bytes,1,rep,name=paths,proto3" 
json:"paths,omitempty"` +} + +func (x *FilePaths) Reset() { + *x = FilePaths{} + if protoimpl.UnsafeEnabled { + mi := &file_chromadb_proto_chroma_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FilePaths) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FilePaths) ProtoMessage() {} + +func (x *FilePaths) ProtoReflect() protoreflect.Message { + mi := &file_chromadb_proto_chroma_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FilePaths.ProtoReflect.Descriptor instead. +func (*FilePaths) Descriptor() ([]byte, []int) { + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{2} +} + +func (x *FilePaths) GetPaths() []string { + if x != nil { + return x.Paths + } + return nil +} + type Segment struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -293,14 +340,15 @@ type Segment struct { Topic *string `protobuf:"bytes,4,opt,name=topic,proto3,oneof" json:"topic,omitempty"` // TODO should channel <> segment binding exist here? // If a segment has a collection, it implies that this segment implements the full // collection and can be used to service queries (for it's given scope.) - Collection *string `protobuf:"bytes,5,opt,name=collection,proto3,oneof" json:"collection,omitempty"` - Metadata *UpdateMetadata `protobuf:"bytes,6,opt,name=metadata,proto3,oneof" json:"metadata,omitempty"` + Collection *string `protobuf:"bytes,5,opt,name=collection,proto3,oneof" json:"collection,omitempty"` + Metadata *UpdateMetadata `protobuf:"bytes,6,opt,name=metadata,proto3,oneof" json:"metadata,omitempty"` + FilePaths map[string]*FilePaths `protobuf:"bytes,7,rep,name=file_paths,json=filePaths,proto3" json:"file_paths,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *Segment) Reset() { *x = Segment{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[2] + mi := &file_chromadb_proto_chroma_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -313,7 +361,7 @@ func (x *Segment) String() string { func (*Segment) ProtoMessage() {} func (x *Segment) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[2] + mi := &file_chromadb_proto_chroma_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -326,7 +374,7 @@ func (x *Segment) ProtoReflect() protoreflect.Message { // Deprecated: Use Segment.ProtoReflect.Descriptor instead. 
func (*Segment) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{2} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{3} } func (x *Segment) GetId() string { @@ -371,24 +419,33 @@ func (x *Segment) GetMetadata() *UpdateMetadata { return nil } +func (x *Segment) GetFilePaths() map[string]*FilePaths { + if x != nil { + return x.FilePaths + } + return nil +} + type Collection struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` - Topic string `protobuf:"bytes,3,opt,name=topic,proto3" json:"topic,omitempty"` - Metadata *UpdateMetadata `protobuf:"bytes,4,opt,name=metadata,proto3,oneof" json:"metadata,omitempty"` - Dimension *int32 `protobuf:"varint,5,opt,name=dimension,proto3,oneof" json:"dimension,omitempty"` - Tenant string `protobuf:"bytes,6,opt,name=tenant,proto3" json:"tenant,omitempty"` - Database string `protobuf:"bytes,7,opt,name=database,proto3" json:"database,omitempty"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Topic string `protobuf:"bytes,3,opt,name=topic,proto3" json:"topic,omitempty"` + Metadata *UpdateMetadata `protobuf:"bytes,4,opt,name=metadata,proto3,oneof" json:"metadata,omitempty"` + Dimension *int32 `protobuf:"varint,5,opt,name=dimension,proto3,oneof" json:"dimension,omitempty"` + Tenant string `protobuf:"bytes,6,opt,name=tenant,proto3" json:"tenant,omitempty"` + Database string `protobuf:"bytes,7,opt,name=database,proto3" json:"database,omitempty"` + LogPosition int64 `protobuf:"varint,8,opt,name=logPosition,proto3" json:"logPosition,omitempty"` + Version int32 `protobuf:"varint,9,opt,name=version,proto3" json:"version,omitempty"` } func (x *Collection) Reset() { *x = Collection{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[3] + mi := &file_chromadb_proto_chroma_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -401,7 +458,7 @@ func (x *Collection) String() string { func (*Collection) ProtoMessage() {} func (x *Collection) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[3] + mi := &file_chromadb_proto_chroma_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -414,7 +471,7 @@ func (x *Collection) ProtoReflect() protoreflect.Message { // Deprecated: Use Collection.ProtoReflect.Descriptor instead. 
func (*Collection) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{3} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{4} } func (x *Collection) GetId() string { @@ -466,6 +523,20 @@ func (x *Collection) GetDatabase() string { return "" } +func (x *Collection) GetLogPosition() int64 { + if x != nil { + return x.LogPosition + } + return 0 +} + +func (x *Collection) GetVersion() int32 { + if x != nil { + return x.Version + } + return 0 +} + type Database struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -479,7 +550,7 @@ type Database struct { func (x *Database) Reset() { *x = Database{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[4] + mi := &file_chromadb_proto_chroma_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -492,7 +563,7 @@ func (x *Database) String() string { func (*Database) ProtoMessage() {} func (x *Database) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[4] + mi := &file_chromadb_proto_chroma_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -505,7 +576,7 @@ func (x *Database) ProtoReflect() protoreflect.Message { // Deprecated: Use Database.ProtoReflect.Descriptor instead. func (*Database) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{4} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{5} } func (x *Database) GetId() string { @@ -540,7 +611,7 @@ type Tenant struct { func (x *Tenant) Reset() { *x = Tenant{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[5] + mi := &file_chromadb_proto_chroma_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -553,7 +624,7 @@ func (x *Tenant) String() string { func (*Tenant) ProtoMessage() {} func (x *Tenant) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[5] + mi := &file_chromadb_proto_chroma_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -566,7 +637,7 @@ func (x *Tenant) ProtoReflect() protoreflect.Message { // Deprecated: Use Tenant.ProtoReflect.Descriptor instead. 
func (*Tenant) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{5} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{6} } func (x *Tenant) GetName() string { @@ -592,7 +663,7 @@ type UpdateMetadataValue struct { func (x *UpdateMetadataValue) Reset() { *x = UpdateMetadataValue{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[6] + mi := &file_chromadb_proto_chroma_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -605,7 +676,7 @@ func (x *UpdateMetadataValue) String() string { func (*UpdateMetadataValue) ProtoMessage() {} func (x *UpdateMetadataValue) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[6] + mi := &file_chromadb_proto_chroma_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -618,7 +689,7 @@ func (x *UpdateMetadataValue) ProtoReflect() protoreflect.Message { // Deprecated: Use UpdateMetadataValue.ProtoReflect.Descriptor instead. func (*UpdateMetadataValue) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{6} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{7} } func (m *UpdateMetadataValue) GetValue() isUpdateMetadataValue_Value { @@ -682,7 +753,7 @@ type UpdateMetadata struct { func (x *UpdateMetadata) Reset() { *x = UpdateMetadata{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[7] + mi := &file_chromadb_proto_chroma_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -695,7 +766,7 @@ func (x *UpdateMetadata) String() string { func (*UpdateMetadata) ProtoMessage() {} func (x *UpdateMetadata) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[7] + mi := &file_chromadb_proto_chroma_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -708,7 +779,7 @@ func (x *UpdateMetadata) ProtoReflect() protoreflect.Message { // Deprecated: Use UpdateMetadata.ProtoReflect.Descriptor instead. func (*UpdateMetadata) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{7} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{8} } func (x *UpdateMetadata) GetMetadata() map[string]*UpdateMetadataValue { @@ -733,7 +804,7 @@ type SubmitEmbeddingRecord struct { func (x *SubmitEmbeddingRecord) Reset() { *x = SubmitEmbeddingRecord{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[8] + mi := &file_chromadb_proto_chroma_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -746,7 +817,7 @@ func (x *SubmitEmbeddingRecord) String() string { func (*SubmitEmbeddingRecord) ProtoMessage() {} func (x *SubmitEmbeddingRecord) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[8] + mi := &file_chromadb_proto_chroma_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -759,7 +830,7 @@ func (x *SubmitEmbeddingRecord) ProtoReflect() protoreflect.Message { // Deprecated: Use SubmitEmbeddingRecord.ProtoReflect.Descriptor instead. 
func (*SubmitEmbeddingRecord) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{8} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{9} } func (x *SubmitEmbeddingRecord) GetId() string { @@ -810,7 +881,7 @@ type VectorEmbeddingRecord struct { func (x *VectorEmbeddingRecord) Reset() { *x = VectorEmbeddingRecord{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[9] + mi := &file_chromadb_proto_chroma_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -823,7 +894,7 @@ func (x *VectorEmbeddingRecord) String() string { func (*VectorEmbeddingRecord) ProtoMessage() {} func (x *VectorEmbeddingRecord) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[9] + mi := &file_chromadb_proto_chroma_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -836,7 +907,7 @@ func (x *VectorEmbeddingRecord) ProtoReflect() protoreflect.Message { // Deprecated: Use VectorEmbeddingRecord.ProtoReflect.Descriptor instead. func (*VectorEmbeddingRecord) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{9} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{10} } func (x *VectorEmbeddingRecord) GetId() string { @@ -874,7 +945,7 @@ type VectorQueryResult struct { func (x *VectorQueryResult) Reset() { *x = VectorQueryResult{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[10] + mi := &file_chromadb_proto_chroma_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -887,7 +958,7 @@ func (x *VectorQueryResult) String() string { func (*VectorQueryResult) ProtoMessage() {} func (x *VectorQueryResult) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[10] + mi := &file_chromadb_proto_chroma_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -900,7 +971,7 @@ func (x *VectorQueryResult) ProtoReflect() protoreflect.Message { // Deprecated: Use VectorQueryResult.ProtoReflect.Descriptor instead. func (*VectorQueryResult) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{10} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{11} } func (x *VectorQueryResult) GetId() string { @@ -942,7 +1013,7 @@ type VectorQueryResults struct { func (x *VectorQueryResults) Reset() { *x = VectorQueryResults{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[11] + mi := &file_chromadb_proto_chroma_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -955,7 +1026,7 @@ func (x *VectorQueryResults) String() string { func (*VectorQueryResults) ProtoMessage() {} func (x *VectorQueryResults) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[11] + mi := &file_chromadb_proto_chroma_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -968,7 +1039,7 @@ func (x *VectorQueryResults) ProtoReflect() protoreflect.Message { // Deprecated: Use VectorQueryResults.ProtoReflect.Descriptor instead. 
func (*VectorQueryResults) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{11} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{12} } func (x *VectorQueryResults) GetResults() []*VectorQueryResult { @@ -990,7 +1061,7 @@ type GetVectorsRequest struct { func (x *GetVectorsRequest) Reset() { *x = GetVectorsRequest{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[12] + mi := &file_chromadb_proto_chroma_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1003,7 +1074,7 @@ func (x *GetVectorsRequest) String() string { func (*GetVectorsRequest) ProtoMessage() {} func (x *GetVectorsRequest) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[12] + mi := &file_chromadb_proto_chroma_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1016,7 +1087,7 @@ func (x *GetVectorsRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use GetVectorsRequest.ProtoReflect.Descriptor instead. func (*GetVectorsRequest) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{12} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{13} } func (x *GetVectorsRequest) GetIds() []string { @@ -1044,7 +1115,7 @@ type GetVectorsResponse struct { func (x *GetVectorsResponse) Reset() { *x = GetVectorsResponse{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[13] + mi := &file_chromadb_proto_chroma_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1057,7 +1128,7 @@ func (x *GetVectorsResponse) String() string { func (*GetVectorsResponse) ProtoMessage() {} func (x *GetVectorsResponse) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[13] + mi := &file_chromadb_proto_chroma_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1070,7 +1141,7 @@ func (x *GetVectorsResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use GetVectorsResponse.ProtoReflect.Descriptor instead. func (*GetVectorsResponse) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{13} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{14} } func (x *GetVectorsResponse) GetRecords() []*VectorEmbeddingRecord { @@ -1095,7 +1166,7 @@ type QueryVectorsRequest struct { func (x *QueryVectorsRequest) Reset() { *x = QueryVectorsRequest{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[14] + mi := &file_chromadb_proto_chroma_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1108,7 +1179,7 @@ func (x *QueryVectorsRequest) String() string { func (*QueryVectorsRequest) ProtoMessage() {} func (x *QueryVectorsRequest) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[14] + mi := &file_chromadb_proto_chroma_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1121,7 +1192,7 @@ func (x *QueryVectorsRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use QueryVectorsRequest.ProtoReflect.Descriptor instead. 
func (*QueryVectorsRequest) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{14} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{15} } func (x *QueryVectorsRequest) GetVectors() []*Vector { @@ -1170,7 +1241,7 @@ type QueryVectorsResponse struct { func (x *QueryVectorsResponse) Reset() { *x = QueryVectorsResponse{} if protoimpl.UnsafeEnabled { - mi := &file_chromadb_proto_chroma_proto_msgTypes[15] + mi := &file_chromadb_proto_chroma_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1183,7 +1254,7 @@ func (x *QueryVectorsResponse) String() string { func (*QueryVectorsResponse) ProtoMessage() {} func (x *QueryVectorsResponse) ProtoReflect() protoreflect.Message { - mi := &file_chromadb_proto_chroma_proto_msgTypes[15] + mi := &file_chromadb_proto_chroma_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1196,7 +1267,7 @@ func (x *QueryVectorsResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use QueryVectorsResponse.ProtoReflect.Descriptor instead. func (*QueryVectorsResponse) Descriptor() ([]byte, []int) { - return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{15} + return file_chromadb_proto_chroma_proto_rawDescGZIP(), []int{16} } func (x *QueryVectorsResponse) GetResults() []*VectorQueryResults { @@ -1222,149 +1293,164 @@ var file_chromadb_proto_chroma_proto_rawDesc = []byte{ 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x53, 0x63, 0x61, 0x6c, 0x61, 0x72, 0x45, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x08, 0x65, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x22, - 0xf8, 0x01, 0x0a, 0x07, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x74, - 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, - 0x2a, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x14, - 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x53, - 0x63, 0x6f, 0x70, 0x65, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x12, 0x19, 0x0a, 0x05, 0x74, - 0x6f, 0x70, 0x69, 0x63, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x74, 0x6f, - 0x70, 0x69, 0x63, 0x88, 0x01, 0x01, 0x12, 0x23, 0x0a, 0x0a, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0a, 0x63, 0x6f, - 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x37, 0x0a, 0x08, 0x6d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, - 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x02, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x42, 0x0d, - 0x0a, 0x0b, 0x5f, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x0b, 0x0a, - 0x09, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0xf1, 0x01, 0x0a, 0x0a, 0x43, - 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, + 0x21, 0x0a, 0x09, 0x46, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 
0x73, 0x12, 0x14, 0x0a, 0x05, + 0x70, 0x61, 0x74, 0x68, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x70, 0x61, 0x74, + 0x68, 0x73, 0x22, 0x88, 0x03, 0x0a, 0x07, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x0e, + 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, + 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, + 0x70, 0x65, 0x12, 0x2a, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x0e, 0x32, 0x14, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x53, 0x65, 0x67, 0x6d, 0x65, + 0x6e, 0x74, 0x53, 0x63, 0x6f, 0x70, 0x65, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x12, 0x19, + 0x0a, 0x05, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, + 0x05, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x88, 0x01, 0x01, 0x12, 0x23, 0x0a, 0x0a, 0x63, 0x6f, 0x6c, + 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, + 0x0a, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x37, + 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x16, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x02, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x3d, 0x0a, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x5f, + 0x70, 0x61, 0x74, 0x68, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x63, 0x68, + 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x46, 0x69, 0x6c, + 0x65, 0x50, 0x61, 0x74, 0x68, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x66, 0x69, 0x6c, + 0x65, 0x50, 0x61, 0x74, 0x68, 0x73, 0x1a, 0x4f, 0x0a, 0x0e, 0x46, 0x69, 0x6c, 0x65, 0x50, 0x61, + 0x74, 0x68, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x63, 0x68, 0x72, 0x6f, + 0x6d, 0x61, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x73, 0x52, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x74, 0x6f, 0x70, 0x69, + 0x63, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0xad, 0x02, + 0x0a, 0x0a, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x05, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, + 0x61, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0x48, 0x00, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, + 0x21, 0x0a, 0x09, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x05, 0x48, 0x01, 0x52, 0x09, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 
0x88, + 0x01, 0x01, 0x12, 0x16, 0x0a, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x64, 0x61, + 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x64, 0x61, + 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x6c, 0x6f, 0x67, 0x50, 0x6f, 0x73, + 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x6c, 0x6f, 0x67, + 0x50, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, + 0x0c, 0x0a, 0x0a, 0x5f, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x46, 0x0a, + 0x08, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, - 0x05, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, - 0x70, 0x69, 0x63, 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x00, 0x52, - 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x21, 0x0a, 0x09, - 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x48, - 0x01, 0x52, 0x09, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, - 0x16, 0x0a, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x64, 0x61, 0x74, 0x61, 0x62, - 0x61, 0x73, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x62, - 0x61, 0x73, 0x65, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x46, - 0x0a, 0x08, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x16, - 0x0a, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, - 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x22, 0x1c, 0x0a, 0x06, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, - 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x85, 0x01, 0x0a, 0x13, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x23, 0x0a, 0x0c, - 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x12, 0x1d, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x12, 0x21, 0x0a, 
0x0b, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xac, 0x01, 0x0a, - 0x0e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, - 0x40, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x24, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x1a, 0x58, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x31, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xfb, 0x01, 0x0a, 0x15, - 0x53, 0x75, 0x62, 0x6d, 0x69, 0x74, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x52, - 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x2b, 0x0a, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x88, - 0x01, 0x01, 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x01, 0x52, 0x08, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x2f, 0x0a, 0x09, 0x6f, - 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x11, - 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x52, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, - 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0c, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, - 0x64, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x42, 0x0b, 0x0a, 0x09, - 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x66, 0x0a, 0x15, 0x56, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x63, 0x6f, - 0x72, 0x64, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, - 0x69, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x73, 0x65, 0x71, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x05, 0x73, 0x65, 0x71, 0x49, 0x64, 0x12, 0x26, 0x0a, 0x06, 0x76, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, 0x6f, - 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x22, 0x8e, 0x01, 0x0a, 0x11, 0x56, 
0x65, 0x63, 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, - 0x79, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x73, 0x65, 0x71, 0x5f, 0x69, - 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x65, 0x71, 0x49, 0x64, 0x12, 0x1a, - 0x0a, 0x08, 0x64, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, - 0x52, 0x08, 0x64, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x2b, 0x0a, 0x06, 0x76, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, - 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x06, 0x76, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x76, 0x65, 0x63, 0x74, - 0x6f, 0x72, 0x22, 0x49, 0x0a, 0x12, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, - 0x79, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x12, 0x33, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, - 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x63, 0x68, 0x72, 0x6f, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, + 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, + 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x22, 0x1c, 0x0a, 0x06, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x22, 0x85, 0x01, 0x0a, 0x13, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x23, 0x0a, 0x0c, 0x73, + 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x12, 0x1d, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, + 0x21, 0x0a, 0x0b, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xac, 0x01, 0x0a, 0x0e, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x40, + 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x24, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0x1a, 0x58, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, + 0x6b, 0x65, 0x79, 0x12, 0x31, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xfb, 0x01, 0x0a, 0x15, 0x53, + 0x75, 0x62, 0x6d, 0x69, 0x74, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, 
0x69, 0x6e, 0x67, 0x52, 0x65, + 0x63, 0x6f, 0x72, 0x64, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x02, 0x69, 0x64, 0x12, 0x2b, 0x0a, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x88, 0x01, + 0x01, 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x01, 0x52, 0x08, 0x6d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x2f, 0x0a, 0x09, 0x6f, 0x70, + 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x11, 0x2e, + 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x52, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x63, + 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0c, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, + 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x42, 0x0b, 0x0a, 0x09, 0x5f, + 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x66, 0x0a, 0x15, 0x56, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x63, 0x6f, 0x72, + 0x64, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, + 0x64, 0x12, 0x15, 0x0a, 0x06, 0x73, 0x65, 0x71, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x05, 0x73, 0x65, 0x71, 0x49, 0x64, 0x12, 0x26, 0x0a, 0x06, 0x76, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, + 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, + 0x22, 0x8e, 0x01, 0x0a, 0x11, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, 0x79, + 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x73, 0x65, 0x71, 0x5f, 0x69, 0x64, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x65, 0x71, 0x49, 0x64, 0x12, 0x1a, 0x0a, + 0x08, 0x64, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, + 0x08, 0x64, 0x69, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x2b, 0x0a, 0x06, 0x76, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, 0x6f, + 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x06, 0x76, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, + 0x72, 0x22, 0x49, 0x0a, 0x12, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, 0x79, + 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x12, 0x33, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, + 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x22, 0x44, 0x0a, 0x11, + 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 
0x73, + 0x74, 0x12, 0x10, 0x0a, 0x03, 0x69, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x03, + 0x69, 0x64, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, + 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, + 0x49, 0x64, 0x22, 0x4d, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x07, 0x72, 0x65, 0x63, 0x6f, + 0x72, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x63, 0x68, 0x72, 0x6f, + 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, + 0x6e, 0x67, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x52, 0x07, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, + 0x73, 0x22, 0xbc, 0x01, 0x0a, 0x13, 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, + 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x28, 0x0a, 0x07, 0x76, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, + 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x07, 0x76, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x73, 0x12, 0x0c, 0x0a, 0x01, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x01, + 0x6b, 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x5f, 0x69, 0x64, 0x73, + 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x49, + 0x64, 0x73, 0x12, 0x2d, 0x0a, 0x12, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x5f, 0x65, 0x6d, + 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, + 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, + 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, + 0x22, 0x4c, 0x0a, 0x14, 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x34, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, + 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, - 0x73, 0x75, 0x6c, 0x74, 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x22, 0x44, 0x0a, - 0x11, 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x69, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x03, 0x69, 0x64, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x5f, - 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, - 0x74, 0x49, 0x64, 0x22, 0x4d, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x07, 0x72, 0x65, 0x63, - 0x6f, 0x72, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x63, 0x68, 0x72, - 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, - 0x69, 0x6e, 0x67, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x52, 0x07, 0x72, 0x65, 0x63, 0x6f, 0x72, - 0x64, 0x73, 0x22, 0xbc, 0x01, 0x0a, 0x13, 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, - 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x28, 0x0a, 0x07, 0x76, 0x65, - 0x63, 0x74, 0x6f, 
0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, - 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x07, 0x76, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x73, 0x12, 0x0c, 0x0a, 0x01, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x01, 0x6b, 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x5f, 0x69, 0x64, - 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, - 0x49, 0x64, 0x73, 0x12, 0x2d, 0x0a, 0x12, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x5f, 0x65, - 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, - 0x11, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, - 0x67, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x49, - 0x64, 0x22, 0x4c, 0x0a, 0x14, 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x34, 0x0a, 0x07, 0x72, 0x65, 0x73, - 0x75, 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x63, 0x68, 0x72, - 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, - 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x2a, - 0x38, 0x0a, 0x09, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x07, 0x0a, 0x03, - 0x41, 0x44, 0x44, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x55, 0x50, 0x44, 0x41, 0x54, 0x45, 0x10, - 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x55, 0x50, 0x53, 0x45, 0x52, 0x54, 0x10, 0x02, 0x12, 0x0a, 0x0a, - 0x06, 0x44, 0x45, 0x4c, 0x45, 0x54, 0x45, 0x10, 0x03, 0x2a, 0x28, 0x0a, 0x0e, 0x53, 0x63, 0x61, - 0x6c, 0x61, 0x72, 0x45, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x12, 0x0b, 0x0a, 0x07, 0x46, - 0x4c, 0x4f, 0x41, 0x54, 0x33, 0x32, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x49, 0x4e, 0x54, 0x33, - 0x32, 0x10, 0x01, 0x2a, 0x28, 0x0a, 0x0c, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x63, - 0x6f, 0x70, 0x65, 0x12, 0x0a, 0x0a, 0x06, 0x56, 0x45, 0x43, 0x54, 0x4f, 0x52, 0x10, 0x00, 0x12, - 0x0c, 0x0a, 0x08, 0x4d, 0x45, 0x54, 0x41, 0x44, 0x41, 0x54, 0x41, 0x10, 0x01, 0x32, 0xa2, 0x01, - 0x0a, 0x0c, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x45, - 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x19, 0x2e, 0x63, - 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, - 0x2e, 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4b, 0x0a, 0x0c, 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x1b, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x51, - 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x51, 0x75, 0x65, 0x72, - 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x00, 0x42, 0x3a, 0x5a, 0x38, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2d, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x63, 0x68, 0x72, - 0x6f, 0x6d, 0x61, 0x2f, 0x67, 0x6f, 0x2f, 
0x70, 0x6b, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x2f, 0x63, 0x6f, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x61, 0x74, 0x6f, 0x72, 0x70, 0x62, 0x62, 0x06, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x73, 0x75, 0x6c, 0x74, 0x73, 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x2a, 0x38, + 0x0a, 0x09, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x07, 0x0a, 0x03, 0x41, + 0x44, 0x44, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x55, 0x50, 0x44, 0x41, 0x54, 0x45, 0x10, 0x01, + 0x12, 0x0a, 0x0a, 0x06, 0x55, 0x50, 0x53, 0x45, 0x52, 0x54, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, + 0x44, 0x45, 0x4c, 0x45, 0x54, 0x45, 0x10, 0x03, 0x2a, 0x28, 0x0a, 0x0e, 0x53, 0x63, 0x61, 0x6c, + 0x61, 0x72, 0x45, 0x6e, 0x63, 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x12, 0x0b, 0x0a, 0x07, 0x46, 0x4c, + 0x4f, 0x41, 0x54, 0x33, 0x32, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x49, 0x4e, 0x54, 0x33, 0x32, + 0x10, 0x01, 0x2a, 0x28, 0x0a, 0x0c, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x63, 0x6f, + 0x70, 0x65, 0x12, 0x0a, 0x0a, 0x06, 0x56, 0x45, 0x43, 0x54, 0x4f, 0x52, 0x10, 0x00, 0x12, 0x0c, + 0x0a, 0x08, 0x4d, 0x45, 0x54, 0x41, 0x44, 0x41, 0x54, 0x41, 0x10, 0x01, 0x32, 0xa2, 0x01, 0x0a, + 0x0c, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x45, 0x0a, + 0x0a, 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x19, 0x2e, 0x63, 0x68, + 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, + 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x00, 0x12, 0x4b, 0x0a, 0x0c, 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x73, 0x12, 0x1b, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x51, 0x75, + 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x1c, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, + 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x00, 0x42, 0x3a, 0x5a, 0x38, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2d, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x63, 0x68, 0x72, 0x6f, + 0x6d, 0x61, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, + 0x63, 0x6f, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x61, 0x74, 0x6f, 0x72, 0x70, 0x62, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1380,54 +1466,58 @@ func file_chromadb_proto_chroma_proto_rawDescGZIP() []byte { } var file_chromadb_proto_chroma_proto_enumTypes = make([]protoimpl.EnumInfo, 3) -var file_chromadb_proto_chroma_proto_msgTypes = make([]protoimpl.MessageInfo, 17) +var file_chromadb_proto_chroma_proto_msgTypes = make([]protoimpl.MessageInfo, 19) var file_chromadb_proto_chroma_proto_goTypes = []interface{}{ (Operation)(0), // 0: chroma.Operation (ScalarEncoding)(0), // 1: chroma.ScalarEncoding (SegmentScope)(0), // 2: chroma.SegmentScope (*Status)(nil), // 3: chroma.Status (*Vector)(nil), // 4: chroma.Vector - (*Segment)(nil), // 5: chroma.Segment - (*Collection)(nil), // 6: chroma.Collection - (*Database)(nil), // 7: chroma.Database - (*Tenant)(nil), // 8: chroma.Tenant - (*UpdateMetadataValue)(nil), // 9: chroma.UpdateMetadataValue - (*UpdateMetadata)(nil), // 10: chroma.UpdateMetadata - (*SubmitEmbeddingRecord)(nil), // 11: chroma.SubmitEmbeddingRecord - 
(*VectorEmbeddingRecord)(nil), // 12: chroma.VectorEmbeddingRecord - (*VectorQueryResult)(nil), // 13: chroma.VectorQueryResult - (*VectorQueryResults)(nil), // 14: chroma.VectorQueryResults - (*GetVectorsRequest)(nil), // 15: chroma.GetVectorsRequest - (*GetVectorsResponse)(nil), // 16: chroma.GetVectorsResponse - (*QueryVectorsRequest)(nil), // 17: chroma.QueryVectorsRequest - (*QueryVectorsResponse)(nil), // 18: chroma.QueryVectorsResponse - nil, // 19: chroma.UpdateMetadata.MetadataEntry + (*FilePaths)(nil), // 5: chroma.FilePaths + (*Segment)(nil), // 6: chroma.Segment + (*Collection)(nil), // 7: chroma.Collection + (*Database)(nil), // 8: chroma.Database + (*Tenant)(nil), // 9: chroma.Tenant + (*UpdateMetadataValue)(nil), // 10: chroma.UpdateMetadataValue + (*UpdateMetadata)(nil), // 11: chroma.UpdateMetadata + (*SubmitEmbeddingRecord)(nil), // 12: chroma.SubmitEmbeddingRecord + (*VectorEmbeddingRecord)(nil), // 13: chroma.VectorEmbeddingRecord + (*VectorQueryResult)(nil), // 14: chroma.VectorQueryResult + (*VectorQueryResults)(nil), // 15: chroma.VectorQueryResults + (*GetVectorsRequest)(nil), // 16: chroma.GetVectorsRequest + (*GetVectorsResponse)(nil), // 17: chroma.GetVectorsResponse + (*QueryVectorsRequest)(nil), // 18: chroma.QueryVectorsRequest + (*QueryVectorsResponse)(nil), // 19: chroma.QueryVectorsResponse + nil, // 20: chroma.Segment.FilePathsEntry + nil, // 21: chroma.UpdateMetadata.MetadataEntry } var file_chromadb_proto_chroma_proto_depIdxs = []int32{ 1, // 0: chroma.Vector.encoding:type_name -> chroma.ScalarEncoding 2, // 1: chroma.Segment.scope:type_name -> chroma.SegmentScope - 10, // 2: chroma.Segment.metadata:type_name -> chroma.UpdateMetadata - 10, // 3: chroma.Collection.metadata:type_name -> chroma.UpdateMetadata - 19, // 4: chroma.UpdateMetadata.metadata:type_name -> chroma.UpdateMetadata.MetadataEntry - 4, // 5: chroma.SubmitEmbeddingRecord.vector:type_name -> chroma.Vector - 10, // 6: chroma.SubmitEmbeddingRecord.metadata:type_name -> chroma.UpdateMetadata - 0, // 7: chroma.SubmitEmbeddingRecord.operation:type_name -> chroma.Operation - 4, // 8: chroma.VectorEmbeddingRecord.vector:type_name -> chroma.Vector - 4, // 9: chroma.VectorQueryResult.vector:type_name -> chroma.Vector - 13, // 10: chroma.VectorQueryResults.results:type_name -> chroma.VectorQueryResult - 12, // 11: chroma.GetVectorsResponse.records:type_name -> chroma.VectorEmbeddingRecord - 4, // 12: chroma.QueryVectorsRequest.vectors:type_name -> chroma.Vector - 14, // 13: chroma.QueryVectorsResponse.results:type_name -> chroma.VectorQueryResults - 9, // 14: chroma.UpdateMetadata.MetadataEntry.value:type_name -> chroma.UpdateMetadataValue - 15, // 15: chroma.VectorReader.GetVectors:input_type -> chroma.GetVectorsRequest - 17, // 16: chroma.VectorReader.QueryVectors:input_type -> chroma.QueryVectorsRequest - 16, // 17: chroma.VectorReader.GetVectors:output_type -> chroma.GetVectorsResponse - 18, // 18: chroma.VectorReader.QueryVectors:output_type -> chroma.QueryVectorsResponse - 17, // [17:19] is the sub-list for method output_type - 15, // [15:17] is the sub-list for method input_type - 15, // [15:15] is the sub-list for extension type_name - 15, // [15:15] is the sub-list for extension extendee - 0, // [0:15] is the sub-list for field type_name + 11, // 2: chroma.Segment.metadata:type_name -> chroma.UpdateMetadata + 20, // 3: chroma.Segment.file_paths:type_name -> chroma.Segment.FilePathsEntry + 11, // 4: chroma.Collection.metadata:type_name -> chroma.UpdateMetadata + 21, // 5: 
chroma.UpdateMetadata.metadata:type_name -> chroma.UpdateMetadata.MetadataEntry + 4, // 6: chroma.SubmitEmbeddingRecord.vector:type_name -> chroma.Vector + 11, // 7: chroma.SubmitEmbeddingRecord.metadata:type_name -> chroma.UpdateMetadata + 0, // 8: chroma.SubmitEmbeddingRecord.operation:type_name -> chroma.Operation + 4, // 9: chroma.VectorEmbeddingRecord.vector:type_name -> chroma.Vector + 4, // 10: chroma.VectorQueryResult.vector:type_name -> chroma.Vector + 14, // 11: chroma.VectorQueryResults.results:type_name -> chroma.VectorQueryResult + 13, // 12: chroma.GetVectorsResponse.records:type_name -> chroma.VectorEmbeddingRecord + 4, // 13: chroma.QueryVectorsRequest.vectors:type_name -> chroma.Vector + 15, // 14: chroma.QueryVectorsResponse.results:type_name -> chroma.VectorQueryResults + 5, // 15: chroma.Segment.FilePathsEntry.value:type_name -> chroma.FilePaths + 10, // 16: chroma.UpdateMetadata.MetadataEntry.value:type_name -> chroma.UpdateMetadataValue + 16, // 17: chroma.VectorReader.GetVectors:input_type -> chroma.GetVectorsRequest + 18, // 18: chroma.VectorReader.QueryVectors:input_type -> chroma.QueryVectorsRequest + 17, // 19: chroma.VectorReader.GetVectors:output_type -> chroma.GetVectorsResponse + 19, // 20: chroma.VectorReader.QueryVectors:output_type -> chroma.QueryVectorsResponse + 19, // [19:21] is the sub-list for method output_type + 17, // [17:19] is the sub-list for method input_type + 17, // [17:17] is the sub-list for extension type_name + 17, // [17:17] is the sub-list for extension extendee + 0, // [0:17] is the sub-list for field type_name } func init() { file_chromadb_proto_chroma_proto_init() } @@ -1461,7 +1551,7 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Segment); i { + switch v := v.(*FilePaths); i { case 0: return &v.state case 1: @@ -1473,7 +1563,7 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Collection); i { + switch v := v.(*Segment); i { case 0: return &v.state case 1: @@ -1485,7 +1575,7 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Database); i { + switch v := v.(*Collection); i { case 0: return &v.state case 1: @@ -1497,7 +1587,7 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Tenant); i { + switch v := v.(*Database); i { case 0: return &v.state case 1: @@ -1509,7 +1599,7 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UpdateMetadataValue); i { + switch v := v.(*Tenant); i { case 0: return &v.state case 1: @@ -1521,7 +1611,7 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UpdateMetadata); i { + switch v := v.(*UpdateMetadataValue); i { case 0: return &v.state case 1: @@ -1533,7 +1623,7 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SubmitEmbeddingRecord); i { + switch v := v.(*UpdateMetadata); i { case 0: return 
&v.state case 1: @@ -1545,7 +1635,7 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*VectorEmbeddingRecord); i { + switch v := v.(*SubmitEmbeddingRecord); i { case 0: return &v.state case 1: @@ -1557,7 +1647,7 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*VectorQueryResult); i { + switch v := v.(*VectorEmbeddingRecord); i { case 0: return &v.state case 1: @@ -1569,7 +1659,7 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*VectorQueryResults); i { + switch v := v.(*VectorQueryResult); i { case 0: return &v.state case 1: @@ -1581,7 +1671,7 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetVectorsRequest); i { + switch v := v.(*VectorQueryResults); i { case 0: return &v.state case 1: @@ -1593,7 +1683,7 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetVectorsResponse); i { + switch v := v.(*GetVectorsRequest); i { case 0: return &v.state case 1: @@ -1605,7 +1695,7 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*QueryVectorsRequest); i { + switch v := v.(*GetVectorsResponse); i { case 0: return &v.state case 1: @@ -1617,6 +1707,18 @@ func file_chromadb_proto_chroma_proto_init() { } } file_chromadb_proto_chroma_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*QueryVectorsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_chromadb_proto_chroma_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*QueryVectorsResponse); i { case 0: return &v.state @@ -1629,22 +1731,22 @@ func file_chromadb_proto_chroma_proto_init() { } } } - file_chromadb_proto_chroma_proto_msgTypes[2].OneofWrappers = []interface{}{} file_chromadb_proto_chroma_proto_msgTypes[3].OneofWrappers = []interface{}{} - file_chromadb_proto_chroma_proto_msgTypes[6].OneofWrappers = []interface{}{ + file_chromadb_proto_chroma_proto_msgTypes[4].OneofWrappers = []interface{}{} + file_chromadb_proto_chroma_proto_msgTypes[7].OneofWrappers = []interface{}{ (*UpdateMetadataValue_StringValue)(nil), (*UpdateMetadataValue_IntValue)(nil), (*UpdateMetadataValue_FloatValue)(nil), } - file_chromadb_proto_chroma_proto_msgTypes[8].OneofWrappers = []interface{}{} - file_chromadb_proto_chroma_proto_msgTypes[10].OneofWrappers = []interface{}{} + file_chromadb_proto_chroma_proto_msgTypes[9].OneofWrappers = []interface{}{} + file_chromadb_proto_chroma_proto_msgTypes[11].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_chromadb_proto_chroma_proto_rawDesc, NumEnums: 3, - NumMessages: 17, + NumMessages: 19, NumExtensions: 0, NumServices: 1, }, diff --git a/go/pkg/proto/coordinatorpb/coordinator.pb.go b/go/pkg/proto/coordinatorpb/coordinator.pb.go index 
5ca8bce37d4..085f6988055 100644 --- a/go/pkg/proto/coordinatorpb/coordinator.pb.go +++ b/go/pkg/proto/coordinatorpb/coordinator.pb.go @@ -1855,6 +1855,203 @@ func (x *SetLastCompactionTimeForTenantRequest) GetTenantLastCompactionTime() *T return nil } +type FlushSegmentCompactionInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + SegmentId string `protobuf:"bytes,1,opt,name=segment_id,json=segmentId,proto3" json:"segment_id,omitempty"` + FilePaths map[string]*FilePaths `protobuf:"bytes,2,rep,name=file_paths,json=filePaths,proto3" json:"file_paths,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *FlushSegmentCompactionInfo) Reset() { + *x = FlushSegmentCompactionInfo{} + if protoimpl.UnsafeEnabled { + mi := &file_chromadb_proto_coordinator_proto_msgTypes[30] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FlushSegmentCompactionInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FlushSegmentCompactionInfo) ProtoMessage() {} + +func (x *FlushSegmentCompactionInfo) ProtoReflect() protoreflect.Message { + mi := &file_chromadb_proto_coordinator_proto_msgTypes[30] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FlushSegmentCompactionInfo.ProtoReflect.Descriptor instead. +func (*FlushSegmentCompactionInfo) Descriptor() ([]byte, []int) { + return file_chromadb_proto_coordinator_proto_rawDescGZIP(), []int{30} +} + +func (x *FlushSegmentCompactionInfo) GetSegmentId() string { + if x != nil { + return x.SegmentId + } + return "" +} + +func (x *FlushSegmentCompactionInfo) GetFilePaths() map[string]*FilePaths { + if x != nil { + return x.FilePaths + } + return nil +} + +type FlushCollectionCompactionRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + TenantId string `protobuf:"bytes,1,opt,name=tenant_id,json=tenantId,proto3" json:"tenant_id,omitempty"` + CollectionId string `protobuf:"bytes,2,opt,name=collection_id,json=collectionId,proto3" json:"collection_id,omitempty"` + LogPosition int64 `protobuf:"varint,3,opt,name=log_position,json=logPosition,proto3" json:"log_position,omitempty"` + CollectionVersion int32 `protobuf:"varint,4,opt,name=collection_version,json=collectionVersion,proto3" json:"collection_version,omitempty"` + SegmentCompactionInfo []*FlushSegmentCompactionInfo `protobuf:"bytes,5,rep,name=segment_compaction_info,json=segmentCompactionInfo,proto3" json:"segment_compaction_info,omitempty"` +} + +func (x *FlushCollectionCompactionRequest) Reset() { + *x = FlushCollectionCompactionRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_chromadb_proto_coordinator_proto_msgTypes[31] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FlushCollectionCompactionRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FlushCollectionCompactionRequest) ProtoMessage() {} + +func (x *FlushCollectionCompactionRequest) ProtoReflect() protoreflect.Message { + mi := &file_chromadb_proto_coordinator_proto_msgTypes[31] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + 
ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FlushCollectionCompactionRequest.ProtoReflect.Descriptor instead. +func (*FlushCollectionCompactionRequest) Descriptor() ([]byte, []int) { + return file_chromadb_proto_coordinator_proto_rawDescGZIP(), []int{31} +} + +func (x *FlushCollectionCompactionRequest) GetTenantId() string { + if x != nil { + return x.TenantId + } + return "" +} + +func (x *FlushCollectionCompactionRequest) GetCollectionId() string { + if x != nil { + return x.CollectionId + } + return "" +} + +func (x *FlushCollectionCompactionRequest) GetLogPosition() int64 { + if x != nil { + return x.LogPosition + } + return 0 +} + +func (x *FlushCollectionCompactionRequest) GetCollectionVersion() int32 { + if x != nil { + return x.CollectionVersion + } + return 0 +} + +func (x *FlushCollectionCompactionRequest) GetSegmentCompactionInfo() []*FlushSegmentCompactionInfo { + if x != nil { + return x.SegmentCompactionInfo + } + return nil +} + +type FlushCollectionCompactionResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + CollectionId string `protobuf:"bytes,1,opt,name=collection_id,json=collectionId,proto3" json:"collection_id,omitempty"` + CollectionVersion int32 `protobuf:"varint,2,opt,name=collection_version,json=collectionVersion,proto3" json:"collection_version,omitempty"` + LastCompactionTime int64 `protobuf:"varint,3,opt,name=last_compaction_time,json=lastCompactionTime,proto3" json:"last_compaction_time,omitempty"` +} + +func (x *FlushCollectionCompactionResponse) Reset() { + *x = FlushCollectionCompactionResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_chromadb_proto_coordinator_proto_msgTypes[32] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FlushCollectionCompactionResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FlushCollectionCompactionResponse) ProtoMessage() {} + +func (x *FlushCollectionCompactionResponse) ProtoReflect() protoreflect.Message { + mi := &file_chromadb_proto_coordinator_proto_msgTypes[32] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FlushCollectionCompactionResponse.ProtoReflect.Descriptor instead. 
+func (*FlushCollectionCompactionResponse) Descriptor() ([]byte, []int) { + return file_chromadb_proto_coordinator_proto_rawDescGZIP(), []int{32} +} + +func (x *FlushCollectionCompactionResponse) GetCollectionId() string { + if x != nil { + return x.CollectionId + } + return "" +} + +func (x *FlushCollectionCompactionResponse) GetCollectionVersion() int32 { + if x != nil { + return x.CollectionVersion + } + return 0 +} + +func (x *FlushCollectionCompactionResponse) GetLastCompactionTime() int64 { + if x != nil { + return x.LastCompactionTime + } + return 0 +} + var File_chromadb_proto_coordinator_proto protoreflect.FileDescriptor var file_chromadb_proto_coordinator_proto_rawDesc = []byte{ @@ -2078,91 +2275,141 @@ var file_chromadb_proto_coordinator_proto_rawDesc = []byte{ 0x6d, 0x61, 0x2e, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x4c, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x52, 0x18, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x4c, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x32, 0x80, 0x0a, 0x0a, 0x05, 0x53, 0x79, 0x73, 0x44, 0x42, 0x12, - 0x51, 0x0a, 0x0e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, - 0x65, 0x12, 0x1d, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x1e, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, - 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x00, 0x12, 0x48, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, - 0x65, 0x12, 0x1a, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x61, - 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, - 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, - 0x73, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4b, 0x0a, 0x0c, - 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x12, 0x1b, 0x2e, 0x63, - 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, 0x6e, 0x61, - 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x63, 0x68, 0x72, 0x6f, - 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x42, 0x0a, 0x09, 0x47, 0x65, 0x74, - 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x12, 0x18, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, - 0x47, 0x65, 0x74, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x19, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x65, 0x6e, - 0x61, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4e, 0x0a, - 0x0d, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1c, + 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x22, 0xde, 0x01, 0x0a, 0x1a, 0x46, 0x6c, 0x75, 0x73, 0x68, 0x53, + 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x5f, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, + 0x74, 0x49, 0x64, 0x12, 0x50, 
0x0a, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, + 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, + 0x2e, 0x46, 0x6c, 0x75, 0x73, 0x68, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6d, + 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x46, 0x69, 0x6c, 0x65, + 0x50, 0x61, 0x74, 0x68, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x66, 0x69, 0x6c, 0x65, + 0x50, 0x61, 0x74, 0x68, 0x73, 0x1a, 0x4f, 0x0a, 0x0e, 0x46, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, + 0x68, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, + 0x61, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x92, 0x02, 0x0a, 0x20, 0x46, 0x6c, 0x75, 0x73, 0x68, + 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, + 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x6f, 0x6c, 0x6c, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0c, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x21, 0x0a, + 0x0c, 0x6c, 0x6f, 0x67, 0x5f, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x0b, 0x6c, 0x6f, 0x67, 0x50, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x2d, 0x0a, 0x12, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x76, + 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x11, 0x63, 0x6f, + 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, + 0x5a, 0x0a, 0x17, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x22, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x46, 0x6c, 0x75, 0x73, 0x68, 0x53, + 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x15, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x6f, 0x6d, + 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x22, 0xa9, 0x01, 0x0a, 0x21, + 0x46, 0x6c, 0x75, 0x73, 0x68, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, + 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x2d, 0x0a, 0x12, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x05, 0x52, 0x11, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x30, 0x0a, 0x14, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x63, 0x6f, + 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 
0x74, 0x69, 0x6d, 0x65, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x12, 0x6c, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x32, 0xf4, 0x0a, 0x0a, 0x05, 0x53, 0x79, 0x73, 0x44, + 0x42, 0x12, 0x51, 0x0a, 0x0e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x62, + 0x61, 0x73, 0x65, 0x12, 0x1d, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, + 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x00, 0x12, 0x48, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x62, + 0x61, 0x73, 0x65, 0x12, 0x1a, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, + 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x1b, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, + 0x62, 0x61, 0x73, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4b, + 0x0a, 0x0c, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x12, 0x1b, + 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, + 0x6e, 0x61, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x63, 0x68, + 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x54, 0x65, 0x6e, 0x61, 0x6e, + 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x42, 0x0a, 0x09, 0x47, + 0x65, 0x74, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x12, 0x18, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, + 0x61, 0x2e, 0x47, 0x65, 0x74, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x54, + 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, + 0x4e, 0x0a, 0x0d, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, + 0x12, 0x1c, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, + 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x65, - 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x63, - 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x53, 0x65, 0x67, 0x6d, - 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4e, 0x0a, - 0x0d, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1c, + 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, + 0x4e, 0x0a, 0x0d, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, + 0x12, 0x1c, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, + 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x53, 0x65, - 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x63, - 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x53, 0x65, 
0x67, 0x6d, - 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x48, 0x0a, - 0x0b, 0x47, 0x65, 0x74, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1a, 0x2e, 0x63, - 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, - 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, - 0x61, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4e, 0x0a, 0x0d, 0x55, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1c, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, + 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, + 0x48, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1a, + 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x65, 0x67, 0x6d, 0x65, + 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x63, 0x68, 0x72, + 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4e, 0x0a, 0x0d, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1c, 0x2e, 0x63, 0x68, 0x72, + 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, + 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x57, 0x0a, 0x10, 0x43, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1f, 0x2e, 0x63, 0x68, - 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x63, - 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6c, 0x6c, - 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, - 0x12, 0x57, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1f, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x44, 0x65, - 0x6c, 0x65, 0x74, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x44, - 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x51, 0x0a, 0x0e, 0x47, 0x65, 0x74, - 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1d, 0x2e, 0x63, 0x68, - 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x63, 0x68, 0x72, - 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x57, 0x0a, 0x10, - 0x55, 0x70, 
0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x12, 0x1f, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, - 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x20, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, - 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x42, 0x0a, 0x0a, 0x52, 0x65, 0x73, 0x65, 0x74, 0x53, 0x74, - 0x61, 0x74, 0x65, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1a, 0x2e, 0x63, 0x68, - 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x81, 0x01, 0x0a, 0x1e, 0x47, 0x65, - 0x74, 0x4c, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, - 0x69, 0x6d, 0x65, 0x46, 0x6f, 0x72, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x12, 0x2d, 0x2e, 0x63, - 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x6d, - 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x46, 0x6f, 0x72, 0x54, 0x65, - 0x6e, 0x61, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2e, 0x2e, 0x63, 0x68, - 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, - 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x46, 0x6f, 0x72, 0x54, 0x65, 0x6e, - 0x61, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x69, 0x0a, - 0x1e, 0x53, 0x65, 0x74, 0x4c, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x46, 0x6f, 0x72, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x12, - 0x2d, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x53, 0x65, 0x74, 0x4c, 0x61, 0x73, 0x74, - 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x46, 0x6f, - 0x72, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x42, 0x3a, 0x5a, 0x38, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2d, 0x63, 0x6f, - 0x72, 0x65, 0x2f, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x6b, 0x67, - 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x63, 0x6f, 0x6f, 0x72, 0x64, 0x69, 0x6e, 0x61, 0x74, - 0x6f, 0x72, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x57, 0x0a, 0x10, 0x43, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1f, 0x2e, + 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6c, + 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, + 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, + 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x22, 0x00, 0x12, 0x57, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x6f, 0x6c, 0x6c, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1f, 0x2e, 0x63, 
0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, + 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, + 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x51, 0x0a, 0x0e, 0x47, + 0x65, 0x74, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1d, 0x2e, + 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x63, + 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x57, + 0x0a, 0x10, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x12, 0x1f, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x42, 0x0a, 0x0a, 0x52, 0x65, 0x73, 0x65, 0x74, + 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1a, 0x2e, + 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, + 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x81, 0x01, 0x0a, 0x1e, + 0x47, 0x65, 0x74, 0x4c, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x46, 0x6f, 0x72, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x12, 0x2d, + 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x73, 0x74, 0x43, + 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x46, 0x6f, 0x72, + 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2e, 0x2e, + 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x61, 0x73, 0x74, 0x43, 0x6f, + 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x46, 0x6f, 0x72, 0x54, + 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, + 0x69, 0x0a, 0x1e, 0x53, 0x65, 0x74, 0x4c, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x46, 0x6f, 0x72, 0x54, 0x65, 0x6e, 0x61, 0x6e, + 0x74, 0x12, 0x2d, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x53, 0x65, 0x74, 0x4c, 0x61, + 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, + 0x46, 0x6f, 0x72, 0x54, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x12, 0x72, 0x0a, 0x19, 0x46, 0x6c, + 0x75, 0x73, 0x68, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, + 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x28, 0x2e, 0x63, 0x68, 0x72, 0x6f, 
0x6d, 0x61, + 0x2e, 0x46, 0x6c, 0x75, 0x73, 0x68, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x29, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x46, 0x6c, 0x75, 0x73, 0x68, + 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x3a, + 0x5a, 0x38, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x68, 0x72, + 0x6f, 0x6d, 0x61, 0x2d, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2f, + 0x67, 0x6f, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x63, 0x6f, 0x6f, + 0x72, 0x64, 0x69, 0x6e, 0x61, 0x74, 0x6f, 0x72, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x33, } var ( @@ -2177,7 +2424,7 @@ func file_chromadb_proto_coordinator_proto_rawDescGZIP() []byte { return file_chromadb_proto_coordinator_proto_rawDescData } -var file_chromadb_proto_coordinator_proto_msgTypes = make([]protoimpl.MessageInfo, 30) +var file_chromadb_proto_coordinator_proto_msgTypes = make([]protoimpl.MessageInfo, 34) var file_chromadb_proto_coordinator_proto_goTypes = []interface{}{ (*CreateDatabaseRequest)(nil), // 0: chroma.CreateDatabaseRequest (*CreateDatabaseResponse)(nil), // 1: chroma.CreateDatabaseResponse @@ -2209,76 +2456,86 @@ var file_chromadb_proto_coordinator_proto_goTypes = []interface{}{ (*TenantLastCompactionTime)(nil), // 27: chroma.TenantLastCompactionTime (*GetLastCompactionTimeForTenantResponse)(nil), // 28: chroma.GetLastCompactionTimeForTenantResponse (*SetLastCompactionTimeForTenantRequest)(nil), // 29: chroma.SetLastCompactionTimeForTenantRequest - (*Status)(nil), // 30: chroma.Status - (*Database)(nil), // 31: chroma.Database - (*Tenant)(nil), // 32: chroma.Tenant - (*Segment)(nil), // 33: chroma.Segment - (SegmentScope)(0), // 34: chroma.SegmentScope - (*UpdateMetadata)(nil), // 35: chroma.UpdateMetadata - (*Collection)(nil), // 36: chroma.Collection - (*emptypb.Empty)(nil), // 37: google.protobuf.Empty + (*FlushSegmentCompactionInfo)(nil), // 30: chroma.FlushSegmentCompactionInfo + (*FlushCollectionCompactionRequest)(nil), // 31: chroma.FlushCollectionCompactionRequest + (*FlushCollectionCompactionResponse)(nil), // 32: chroma.FlushCollectionCompactionResponse + nil, // 33: chroma.FlushSegmentCompactionInfo.FilePathsEntry + (*Status)(nil), // 34: chroma.Status + (*Database)(nil), // 35: chroma.Database + (*Tenant)(nil), // 36: chroma.Tenant + (*Segment)(nil), // 37: chroma.Segment + (SegmentScope)(0), // 38: chroma.SegmentScope + (*UpdateMetadata)(nil), // 39: chroma.UpdateMetadata + (*Collection)(nil), // 40: chroma.Collection + (*FilePaths)(nil), // 41: chroma.FilePaths + (*emptypb.Empty)(nil), // 42: google.protobuf.Empty } var file_chromadb_proto_coordinator_proto_depIdxs = []int32{ - 30, // 0: chroma.CreateDatabaseResponse.status:type_name -> chroma.Status - 31, // 1: chroma.GetDatabaseResponse.database:type_name -> chroma.Database - 30, // 2: chroma.GetDatabaseResponse.status:type_name -> chroma.Status - 30, // 3: chroma.CreateTenantResponse.status:type_name -> chroma.Status - 32, // 4: chroma.GetTenantResponse.tenant:type_name -> chroma.Tenant - 30, // 5: chroma.GetTenantResponse.status:type_name -> chroma.Status - 33, // 6: chroma.CreateSegmentRequest.segment:type_name -> chroma.Segment - 30, // 7: chroma.CreateSegmentResponse.status:type_name -> 
chroma.Status - 30, // 8: chroma.DeleteSegmentResponse.status:type_name -> chroma.Status - 34, // 9: chroma.GetSegmentsRequest.scope:type_name -> chroma.SegmentScope - 33, // 10: chroma.GetSegmentsResponse.segments:type_name -> chroma.Segment - 30, // 11: chroma.GetSegmentsResponse.status:type_name -> chroma.Status - 35, // 12: chroma.UpdateSegmentRequest.metadata:type_name -> chroma.UpdateMetadata - 30, // 13: chroma.UpdateSegmentResponse.status:type_name -> chroma.Status - 35, // 14: chroma.CreateCollectionRequest.metadata:type_name -> chroma.UpdateMetadata - 36, // 15: chroma.CreateCollectionResponse.collection:type_name -> chroma.Collection - 30, // 16: chroma.CreateCollectionResponse.status:type_name -> chroma.Status - 30, // 17: chroma.DeleteCollectionResponse.status:type_name -> chroma.Status - 36, // 18: chroma.GetCollectionsResponse.collections:type_name -> chroma.Collection - 30, // 19: chroma.GetCollectionsResponse.status:type_name -> chroma.Status - 35, // 20: chroma.UpdateCollectionRequest.metadata:type_name -> chroma.UpdateMetadata - 30, // 21: chroma.UpdateCollectionResponse.status:type_name -> chroma.Status - 30, // 22: chroma.ResetStateResponse.status:type_name -> chroma.Status + 34, // 0: chroma.CreateDatabaseResponse.status:type_name -> chroma.Status + 35, // 1: chroma.GetDatabaseResponse.database:type_name -> chroma.Database + 34, // 2: chroma.GetDatabaseResponse.status:type_name -> chroma.Status + 34, // 3: chroma.CreateTenantResponse.status:type_name -> chroma.Status + 36, // 4: chroma.GetTenantResponse.tenant:type_name -> chroma.Tenant + 34, // 5: chroma.GetTenantResponse.status:type_name -> chroma.Status + 37, // 6: chroma.CreateSegmentRequest.segment:type_name -> chroma.Segment + 34, // 7: chroma.CreateSegmentResponse.status:type_name -> chroma.Status + 34, // 8: chroma.DeleteSegmentResponse.status:type_name -> chroma.Status + 38, // 9: chroma.GetSegmentsRequest.scope:type_name -> chroma.SegmentScope + 37, // 10: chroma.GetSegmentsResponse.segments:type_name -> chroma.Segment + 34, // 11: chroma.GetSegmentsResponse.status:type_name -> chroma.Status + 39, // 12: chroma.UpdateSegmentRequest.metadata:type_name -> chroma.UpdateMetadata + 34, // 13: chroma.UpdateSegmentResponse.status:type_name -> chroma.Status + 39, // 14: chroma.CreateCollectionRequest.metadata:type_name -> chroma.UpdateMetadata + 40, // 15: chroma.CreateCollectionResponse.collection:type_name -> chroma.Collection + 34, // 16: chroma.CreateCollectionResponse.status:type_name -> chroma.Status + 34, // 17: chroma.DeleteCollectionResponse.status:type_name -> chroma.Status + 40, // 18: chroma.GetCollectionsResponse.collections:type_name -> chroma.Collection + 34, // 19: chroma.GetCollectionsResponse.status:type_name -> chroma.Status + 39, // 20: chroma.UpdateCollectionRequest.metadata:type_name -> chroma.UpdateMetadata + 34, // 21: chroma.UpdateCollectionResponse.status:type_name -> chroma.Status + 34, // 22: chroma.ResetStateResponse.status:type_name -> chroma.Status 27, // 23: chroma.GetLastCompactionTimeForTenantResponse.tenant_last_compaction_time:type_name -> chroma.TenantLastCompactionTime 27, // 24: chroma.SetLastCompactionTimeForTenantRequest.tenant_last_compaction_time:type_name -> chroma.TenantLastCompactionTime - 0, // 25: chroma.SysDB.CreateDatabase:input_type -> chroma.CreateDatabaseRequest - 2, // 26: chroma.SysDB.GetDatabase:input_type -> chroma.GetDatabaseRequest - 4, // 27: chroma.SysDB.CreateTenant:input_type -> chroma.CreateTenantRequest - 6, // 28: chroma.SysDB.GetTenant:input_type -> 
chroma.GetTenantRequest
- 8, // 29: chroma.SysDB.CreateSegment:input_type -> chroma.CreateSegmentRequest
- 10, // 30: chroma.SysDB.DeleteSegment:input_type -> chroma.DeleteSegmentRequest
- 12, // 31: chroma.SysDB.GetSegments:input_type -> chroma.GetSegmentsRequest
- 14, // 32: chroma.SysDB.UpdateSegment:input_type -> chroma.UpdateSegmentRequest
- 16, // 33: chroma.SysDB.CreateCollection:input_type -> chroma.CreateCollectionRequest
- 18, // 34: chroma.SysDB.DeleteCollection:input_type -> chroma.DeleteCollectionRequest
- 20, // 35: chroma.SysDB.GetCollections:input_type -> chroma.GetCollectionsRequest
- 22, // 36: chroma.SysDB.UpdateCollection:input_type -> chroma.UpdateCollectionRequest
- 37, // 37: chroma.SysDB.ResetState:input_type -> google.protobuf.Empty
- 26, // 38: chroma.SysDB.GetLastCompactionTimeForTenant:input_type -> chroma.GetLastCompactionTimeForTenantRequest
- 29, // 39: chroma.SysDB.SetLastCompactionTimeForTenant:input_type -> chroma.SetLastCompactionTimeForTenantRequest
- 1, // 40: chroma.SysDB.CreateDatabase:output_type -> chroma.CreateDatabaseResponse
- 3, // 41: chroma.SysDB.GetDatabase:output_type -> chroma.GetDatabaseResponse
- 5, // 42: chroma.SysDB.CreateTenant:output_type -> chroma.CreateTenantResponse
- 7, // 43: chroma.SysDB.GetTenant:output_type -> chroma.GetTenantResponse
- 9, // 44: chroma.SysDB.CreateSegment:output_type -> chroma.CreateSegmentResponse
- 11, // 45: chroma.SysDB.DeleteSegment:output_type -> chroma.DeleteSegmentResponse
- 13, // 46: chroma.SysDB.GetSegments:output_type -> chroma.GetSegmentsResponse
- 15, // 47: chroma.SysDB.UpdateSegment:output_type -> chroma.UpdateSegmentResponse
- 17, // 48: chroma.SysDB.CreateCollection:output_type -> chroma.CreateCollectionResponse
- 19, // 49: chroma.SysDB.DeleteCollection:output_type -> chroma.DeleteCollectionResponse
- 21, // 50: chroma.SysDB.GetCollections:output_type -> chroma.GetCollectionsResponse
- 23, // 51: chroma.SysDB.UpdateCollection:output_type -> chroma.UpdateCollectionResponse
- 25, // 52: chroma.SysDB.ResetState:output_type -> chroma.ResetStateResponse
- 28, // 53: chroma.SysDB.GetLastCompactionTimeForTenant:output_type -> chroma.GetLastCompactionTimeForTenantResponse
- 37, // 54: chroma.SysDB.SetLastCompactionTimeForTenant:output_type -> google.protobuf.Empty
- 40, // [40:55] is the sub-list for method output_type
- 25, // [25:40] is the sub-list for method input_type
- 25, // [25:25] is the sub-list for extension type_name
- 25, // [25:25] is the sub-list for extension extendee
- 0, // [0:25] is the sub-list for field type_name
+ 33, // 25: chroma.FlushSegmentCompactionInfo.file_paths:type_name -> chroma.FlushSegmentCompactionInfo.FilePathsEntry
+ 30, // 26: chroma.FlushCollectionCompactionRequest.segment_compaction_info:type_name -> chroma.FlushSegmentCompactionInfo
+ 41, // 27: chroma.FlushSegmentCompactionInfo.FilePathsEntry.value:type_name -> chroma.FilePaths
+ 0, // 28: chroma.SysDB.CreateDatabase:input_type -> chroma.CreateDatabaseRequest
+ 2, // 29: chroma.SysDB.GetDatabase:input_type -> chroma.GetDatabaseRequest
+ 4, // 30: chroma.SysDB.CreateTenant:input_type -> chroma.CreateTenantRequest
+ 6, // 31: chroma.SysDB.GetTenant:input_type -> chroma.GetTenantRequest
+ 8, // 32: chroma.SysDB.CreateSegment:input_type -> chroma.CreateSegmentRequest
+ 10, // 33: chroma.SysDB.DeleteSegment:input_type -> chroma.DeleteSegmentRequest
+ 12, // 34: chroma.SysDB.GetSegments:input_type -> chroma.GetSegmentsRequest
+ 14, // 35: chroma.SysDB.UpdateSegment:input_type -> chroma.UpdateSegmentRequest
+ 16, // 36: chroma.SysDB.CreateCollection:input_type -> chroma.CreateCollectionRequest
+ 18, // 37: chroma.SysDB.DeleteCollection:input_type -> chroma.DeleteCollectionRequest
+ 20, // 38: chroma.SysDB.GetCollections:input_type -> chroma.GetCollectionsRequest
+ 22, // 39: chroma.SysDB.UpdateCollection:input_type -> chroma.UpdateCollectionRequest
+ 42, // 40: chroma.SysDB.ResetState:input_type -> google.protobuf.Empty
+ 26, // 41: chroma.SysDB.GetLastCompactionTimeForTenant:input_type -> chroma.GetLastCompactionTimeForTenantRequest
+ 29, // 42: chroma.SysDB.SetLastCompactionTimeForTenant:input_type -> chroma.SetLastCompactionTimeForTenantRequest
+ 31, // 43: chroma.SysDB.FlushCollectionCompaction:input_type -> chroma.FlushCollectionCompactionRequest
+ 1, // 44: chroma.SysDB.CreateDatabase:output_type -> chroma.CreateDatabaseResponse
+ 3, // 45: chroma.SysDB.GetDatabase:output_type -> chroma.GetDatabaseResponse
+ 5, // 46: chroma.SysDB.CreateTenant:output_type -> chroma.CreateTenantResponse
+ 7, // 47: chroma.SysDB.GetTenant:output_type -> chroma.GetTenantResponse
+ 9, // 48: chroma.SysDB.CreateSegment:output_type -> chroma.CreateSegmentResponse
+ 11, // 49: chroma.SysDB.DeleteSegment:output_type -> chroma.DeleteSegmentResponse
+ 13, // 50: chroma.SysDB.GetSegments:output_type -> chroma.GetSegmentsResponse
+ 15, // 51: chroma.SysDB.UpdateSegment:output_type -> chroma.UpdateSegmentResponse
+ 17, // 52: chroma.SysDB.CreateCollection:output_type -> chroma.CreateCollectionResponse
+ 19, // 53: chroma.SysDB.DeleteCollection:output_type -> chroma.DeleteCollectionResponse
+ 21, // 54: chroma.SysDB.GetCollections:output_type -> chroma.GetCollectionsResponse
+ 23, // 55: chroma.SysDB.UpdateCollection:output_type -> chroma.UpdateCollectionResponse
+ 25, // 56: chroma.SysDB.ResetState:output_type -> chroma.ResetStateResponse
+ 28, // 57: chroma.SysDB.GetLastCompactionTimeForTenant:output_type -> chroma.GetLastCompactionTimeForTenantResponse
+ 42, // 58: chroma.SysDB.SetLastCompactionTimeForTenant:output_type -> google.protobuf.Empty
+ 32, // 59: chroma.SysDB.FlushCollectionCompaction:output_type -> chroma.FlushCollectionCompactionResponse
+ 44, // [44:60] is the sub-list for method output_type
+ 28, // [28:44] is the sub-list for method input_type
+ 28, // [28:28] is the sub-list for extension type_name
+ 28, // [28:28] is the sub-list for extension extendee
+ 0, // [0:28] is the sub-list for field type_name
}
func init() { file_chromadb_proto_coordinator_proto_init() }
@@ -2648,6 +2905,42 @@ func file_chromadb_proto_coordinator_proto_init() {
return nil
}
}
+ file_chromadb_proto_coordinator_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*FlushSegmentCompactionInfo); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_chromadb_proto_coordinator_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*FlushCollectionCompactionRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_chromadb_proto_coordinator_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} {
+ switch v := v.(*FlushCollectionCompactionResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
}
file_chromadb_proto_coordinator_proto_msgTypes[12].OneofWrappers = []interface{}{}
file_chromadb_proto_coordinator_proto_msgTypes[14].OneofWrappers = []interface{}{
@@ -2670,7 +2963,7 @@ func file_chromadb_proto_coordinator_proto_init() {
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_chromadb_proto_coordinator_proto_rawDesc,
NumEnums: 0,
- NumMessages: 30,
+ NumMessages: 34,
NumExtensions: 0,
NumServices: 1,
},
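The regenerated descriptor above registers three new message types (FlushSegmentCompactionInfo, FlushCollectionCompactionRequest, FlushCollectionCompactionResponse) and renumbers the dependency index accordingly. Below is a minimal sketch of how the regenerated Segment type might be used from Go; the import path comes from the go_package option in this diff, but the field and enum names (Id, Scope, FilePaths, SegmentScope_VECTOR) assume standard protoc-gen-go naming, and the id, type string, map key, and path are made-up placeholders.

package main

import (
	"fmt"

	"github.com/chroma-core/chroma/go/pkg/proto/coordinatorpb"
)

func main() {
	// Segment now carries a map from a file category to the paths produced
	// for it by compaction; all concrete values here are placeholders.
	seg := &coordinatorpb.Segment{
		Id:    "11111111-1111-1111-1111-111111111111",
		Type:  "urn:chroma:segment/vector/hnsw", // placeholder type string
		Scope: coordinatorpb.SegmentScope_VECTOR,
		FilePaths: map[string]*coordinatorpb.FilePaths{
			"hnsw": {Paths: []string{"tenant/database/collection/segment/index.bin"}},
		},
	}
	for name, fp := range seg.FilePaths {
		fmt.Println(name, fp.Paths)
	}
}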
diff --git a/go/pkg/proto/coordinatorpb/coordinator_grpc.pb.go b/go/pkg/proto/coordinatorpb/coordinator_grpc.pb.go
index 755d0190efd..d6ae92167c3 100644
--- a/go/pkg/proto/coordinatorpb/coordinator_grpc.pb.go
+++ b/go/pkg/proto/coordinatorpb/coordinator_grpc.pb.go
@@ -38,6 +38,7 @@ type SysDBClient interface {
ResetState(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*ResetStateResponse, error)
GetLastCompactionTimeForTenant(ctx context.Context, in *GetLastCompactionTimeForTenantRequest, opts ...grpc.CallOption) (*GetLastCompactionTimeForTenantResponse, error)
SetLastCompactionTimeForTenant(ctx context.Context, in *SetLastCompactionTimeForTenantRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
+ FlushCollectionCompaction(ctx context.Context, in *FlushCollectionCompactionRequest, opts ...grpc.CallOption) (*FlushCollectionCompactionResponse, error)
}
type sysDBClient struct {
@@ -183,6 +184,15 @@ func (c *sysDBClient) SetLastCompactionTimeForTenant(ctx context.Context, in *Se
return out, nil
}
+func (c *sysDBClient) FlushCollectionCompaction(ctx context.Context, in *FlushCollectionCompactionRequest, opts ...grpc.CallOption) (*FlushCollectionCompactionResponse, error) {
+ out := new(FlushCollectionCompactionResponse)
+ err := c.cc.Invoke(ctx, "/chroma.SysDB/FlushCollectionCompaction", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
// SysDBServer is the server API for SysDB service.
// All implementations must embed UnimplementedSysDBServer
// for forward compatibility
@@ -202,6 +212,7 @@ type SysDBServer interface {
ResetState(context.Context, *emptypb.Empty) (*ResetStateResponse, error)
GetLastCompactionTimeForTenant(context.Context, *GetLastCompactionTimeForTenantRequest) (*GetLastCompactionTimeForTenantResponse, error)
SetLastCompactionTimeForTenant(context.Context, *SetLastCompactionTimeForTenantRequest) (*emptypb.Empty, error)
+ FlushCollectionCompaction(context.Context, *FlushCollectionCompactionRequest) (*FlushCollectionCompactionResponse, error)
mustEmbedUnimplementedSysDBServer()
}
@@ -254,6 +265,9 @@ func (UnimplementedSysDBServer) GetLastCompactionTimeForTenant(context.Context,
func (UnimplementedSysDBServer) SetLastCompactionTimeForTenant(context.Context, *SetLastCompactionTimeForTenantRequest) (*emptypb.Empty, error) {
return nil, status.Errorf(codes.Unimplemented, "method SetLastCompactionTimeForTenant not implemented")
}
+func (UnimplementedSysDBServer) FlushCollectionCompaction(context.Context, *FlushCollectionCompactionRequest) (*FlushCollectionCompactionResponse, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method FlushCollectionCompaction not implemented")
+}
func (UnimplementedSysDBServer) mustEmbedUnimplementedSysDBServer() {}
// UnsafeSysDBServer may be embedded to opt out of forward compatibility for this service.
@@ -537,6 +551,24 @@ func _SysDB_SetLastCompactionTimeForTenant_Handler(srv interface{}, ctx context.
return interceptor(ctx, in, info, handler)
}
+func _SysDB_FlushCollectionCompaction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(FlushCollectionCompactionRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(SysDBServer).FlushCollectionCompaction(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: "/chroma.SysDB/FlushCollectionCompaction",
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(SysDBServer).FlushCollectionCompaction(ctx, req.(*FlushCollectionCompactionRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
// SysDB_ServiceDesc is the grpc.ServiceDesc for SysDB service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
@@ -604,6 +636,10 @@ var SysDB_ServiceDesc = grpc.ServiceDesc{
MethodName: "SetLastCompactionTimeForTenant",
Handler: _SysDB_SetLastCompactionTimeForTenant_Handler,
},
+ {
+ MethodName: "FlushCollectionCompaction",
+ Handler: _SysDB_FlushCollectionCompaction_Handler,
+ },
},
Streams: []grpc.StreamDesc{},
Metadata: "chromadb/proto/coordinator.proto",
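With the stubs regenerated, both SysDBClient and SysDBServer expose FlushCollectionCompaction. The sketch below shows one way a Go server could wire up the new method by embedding UnimplementedSysDBServer; it is illustrative only, since the real coordinator implementation is not part of this diff, and the version-bump and zero timestamp here are assumptions, not the actual service logic.

package main

import (
	"context"
	"net"

	"google.golang.org/grpc"

	"github.com/chroma-core/chroma/go/pkg/proto/coordinatorpb"
)

type sysDBServer struct {
	// Embedding keeps the server forward compatible with new RPCs.
	coordinatorpb.UnimplementedSysDBServer
}

func (s *sysDBServer) FlushCollectionCompaction(ctx context.Context, req *coordinatorpb.FlushCollectionCompactionRequest) (*coordinatorpb.FlushCollectionCompactionResponse, error) {
	// A real implementation would persist req.SegmentCompactionInfo and the new
	// log position, then bump the collection version; this is a placeholder.
	return &coordinatorpb.FlushCollectionCompactionResponse{
		CollectionId:       req.CollectionId,
		CollectionVersion:  req.CollectionVersion + 1,
		LastCompactionTime: 0, // placeholder timestamp
	}, nil
}

func main() {
	lis, err := net.Listen("tcp", ":50051") // placeholder address
	if err != nil {
		panic(err)
	}
	s := grpc.NewServer()
	coordinatorpb.RegisterSysDBServer(s, &sysDBServer{})
	_ = s.Serve(lis)
}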
diff --git a/idl/chromadb/proto/chroma.proto b/idl/chromadb/proto/chroma.proto
index 7a95fbe5c89..44d899e4530 100644
--- a/idl/chromadb/proto/chroma.proto
+++ b/idl/chromadb/proto/chroma.proto
@@ -34,6 +34,10 @@ enum SegmentScope {
METADATA = 1;
}
+message FilePaths {
+ repeated string paths = 1;
+}
+
message Segment {
string id = 1;
string type = 2;
@@ -43,6 +47,7 @@ message Segment {
// collection and can be used to service queries (for it's given scope.)
optional string collection = 5;
optional UpdateMetadata metadata = 6;
+ map<string, FilePaths> file_paths = 7;
}
message Collection {
@@ -53,6 +58,8 @@
optional int32 dimension = 5;
string tenant = 6;
string database = 7;
+ int64 logPosition = 8;
+ int32 version = 9;
}
message Database {
diff --git a/idl/chromadb/proto/coordinator.proto b/idl/chromadb/proto/coordinator.proto
index 5e31b3273af..3695999ded8 100644
--- a/idl/chromadb/proto/coordinator.proto
+++ b/idl/chromadb/proto/coordinator.proto
@@ -176,6 +176,25 @@ message SetLastCompactionTimeForTenantRequest {
TenantLastCompactionTime tenant_last_compaction_time = 1;
}
+message FlushSegmentCompactionInfo {
+ string segment_id = 1;
+ map<string, FilePaths> file_paths = 2;
+}
+
+message FlushCollectionCompactionRequest {
+ string tenant_id = 1;
+ string collection_id = 2;
+ int64 log_position = 3;
+ int32 collection_version = 4;
+ repeated FlushSegmentCompactionInfo segment_compaction_info = 5;
+}
+
+message FlushCollectionCompactionResponse {
+ string collection_id = 1;
+ int32 collection_version = 2;
+ int64 last_compaction_time = 3;
+}
+
service SysDB {
rpc CreateDatabase(CreateDatabaseRequest) returns (CreateDatabaseResponse) {}
rpc GetDatabase(GetDatabaseRequest) returns (GetDatabaseResponse) {}
@@ -192,4 +211,5 @@ service SysDB {
rpc ResetState(google.protobuf.Empty) returns (ResetStateResponse) {}
rpc GetLastCompactionTimeForTenant(GetLastCompactionTimeForTenantRequest) returns (GetLastCompactionTimeForTenantResponse) {}
rpc SetLastCompactionTimeForTenant(SetLastCompactionTimeForTenantRequest) returns (google.protobuf.Empty) {}
+ rpc FlushCollectionCompaction(FlushCollectionCompactionRequest) returns (FlushCollectionCompactionResponse) {}
}
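The coordinator.proto additions define the flush contract: after a compaction run, the worker reports the log position it compacted through, the collection version it read, and the files each segment produced; the coordinator replies with the collection's new version and last compaction time. Carrying log_position and collection_version in the request presumably lets the coordinator reject stale or duplicate flushes. A hedged sketch of the calling side in Go follows; the address, identifiers, and file paths are placeholders, and the generated field names assume protoc-gen-go conventions.

package main

import (
	"context"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	"github.com/chroma-core/chroma/go/pkg/proto/coordinatorpb"
)

func main() {
	conn, err := grpc.Dial("localhost:50051", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := coordinatorpb.NewSysDBClient(conn)
	resp, err := client.FlushCollectionCompaction(context.Background(), &coordinatorpb.FlushCollectionCompactionRequest{
		TenantId:          "tenant_1",
		CollectionId:      "00000000-0000-0000-0000-000000000000",
		LogPosition:       42, // last log offset included in this compaction (placeholder)
		CollectionVersion: 0,  // version the compactor read before flushing
		SegmentCompactionInfo: []*coordinatorpb.FlushSegmentCompactionInfo{
			{
				SegmentId: "11111111-1111-1111-1111-111111111111",
				FilePaths: map[string]*coordinatorpb.FilePaths{
					"hnsw": {Paths: []string{"segment/hnsw/index.bin"}},
				},
			},
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("collection %s now at version %d", resp.CollectionId, resp.CollectionVersion)
}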
diff --git a/rust/worker/src/compactor/scheduler.rs b/rust/worker/src/compactor/scheduler.rs
index 8e418dcd99e..7fc1c56c644 100644
--- a/rust/worker/src/compactor/scheduler.rs
+++ b/rust/worker/src/compactor/scheduler.rs
@@ -322,6 +322,8 @@ mod tests {
dimension: Some(1),
tenant: "tenant_1".to_string(),
database: "database_1".to_string(),
+ log_position: 0,
+ version: 0,
};
let collection_2 = Collection {
@@ -332,6 +334,8 @@
dimension: Some(1),
tenant: "tenant_2".to_string(),
database: "database_2".to_string(),
+ log_position: 0,
+ version: 0,
};
sysdb.add_collection(collection_1);
sysdb.add_collection(collection_2);
diff --git a/rust/worker/src/types/collection.rs b/rust/worker/src/types/collection.rs
index 049e0c4a133..ecfdeef1346 100644
--- a/rust/worker/src/types/collection.rs
+++ b/rust/worker/src/types/collection.rs
@@ -15,6 +15,8 @@ pub(crate) struct Collection {
pub(crate) dimension: Option<i32>,
pub(crate) tenant: String,
pub(crate) database: String,
+ pub(crate) log_position: i64,
+ pub(crate) version: i32,
}
#[derive(Error, Debug)]
@@ -57,6 +59,8 @@ impl TryFrom<chroma_proto::Collection> for Collection {
dimension: proto_collection.dimension,
tenant: proto_collection.tenant,
database: proto_collection.database,
+ log_position: proto_collection.log_position,
+ version: proto_collection.version,
})
}
}
@@ -75,6 +79,8 @@ mod test {
dimension: None,
tenant: "baz".to_string(),
database: "qux".to_string(),
+ log_position: 0,
+ version: 0,
};
let converted_collection: Collection = proto_collection.try_into().unwrap();
assert_eq!(converted_collection.id, Uuid::nil());
diff --git a/rust/worker/src/types/segment.rs b/rust/worker/src/types/segment.rs
index 4b39161e2b2..d85d1293eea 100644
--- a/rust/worker/src/types/segment.rs
+++ b/rust/worker/src/types/segment.rs
@@ -3,6 +3,8 @@ use crate::{
chroma_proto,
errors::{ChromaError, ErrorCodes},
};
+use std::collections::HashMap;
+use std::vec::Vec;
use thiserror::Error;
use uuid::Uuid;
@@ -19,6 +21,7 @@ pub(crate) struct Segment {
pub(crate) topic: Option<String>,
pub(crate) collection: Option<Uuid>,
pub(crate) metadata: Option,
+ pub(crate) file_path: HashMap<String, Vec<String>>,
}
#[derive(Error, Debug)]
@@ -48,6 +51,8 @@ impl TryFrom<chroma_proto::Segment> for Segment {
type Error = SegmentConversionError;
fn try_from(proto_segment: chroma_proto::Segment) -> Result<Self, Self::Error> {
+ let mut proto_segment = proto_segment;
+
let segment_uuid = match Uuid::try_parse(&proto_segment.id) {
Ok(uuid) => uuid,
Err(_) => return Err(SegmentConversionError::InvalidUuid),
@@ -79,6 +84,12 @@
}
};
+ let mut file_paths = HashMap::new();
+ let drain = proto_segment.file_paths.drain();
+ for (key, mut value) in drain {
+ file_paths.insert(key, value.paths);
+ }
+
Ok(Segment {
id: segment_uuid,
r#type: segment_type,
@@ -86,6 +97,7 @@
topic: proto_segment.topic,
collection: collection_uuid,
metadata: segment_metadata,
+ file_path: file_paths,
})
}
}
@@ -115,6 +127,7 @@ mod tests {
topic: Some("test".to_string()),
collection: Some("00000000-0000-0000-0000-000000000000".to_string()),
metadata: Some(metadata),
+ file_paths: HashMap::new(),
};
let converted_segment: Segment = proto_segment.try_into().unwrap();
assert_eq!(converted_segment.id, Uuid::nil());