diff --git a/docs/nitric/proto/index.html b/docs/nitric/api/const.html similarity index 91% rename from docs/nitric/proto/index.html rename to docs/nitric/api/const.html index 185a8d4..d5b298f 100644 --- a/docs/nitric/proto/index.html +++ b/docs/nitric/api/const.html @@ -4,7 +4,7 @@ -nitric.proto API documentation +nitric.api.const API documentation @@ -19,7 +19,7 @@
-

Module nitric.proto

+

Module nitric.api.const

@@ -43,17 +43,14 @@

Module nitric.proto

# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -# +# + +# The maximum number of parent collections a sub-collection can have. +# This is implemented in the Membrane, but reinforced here for immediate exceptions without a server connection. +MAX_SUB_COLLECTION_DEPTH = 1
-

Sub-modules

-
-
nitric.proto.nitric
-
-
-
-
@@ -70,12 +67,7 @@

Index

diff --git a/docs/nitric/api/documents.html b/docs/nitric/api/documents.html new file mode 100644 index 0000000..671e10a --- /dev/null +++ b/docs/nitric/api/documents.html @@ -0,0 +1,1961 @@ + + + + + + +nitric.api.documents API documentation + + + + + + + + + + + +
+
+
+

Module nitric.api.documents

+
+
+
+ +Expand source code + +
#
+# Copyright (c) 2021 Nitric Technologies Pty Ltd.
+#
+# This file is part of Nitric Python 3 SDK.
+# See https://github.com/nitrictech/python-sdk for further info.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from enum import Enum
+from typing import List, AsyncIterator, Union, Any, Tuple
+
+from grpclib import GRPCError
+
+from nitric.api.const import MAX_SUB_COLLECTION_DEPTH
+from nitric.api.exception import exception_from_grpc_error
+from nitricapi.nitric.document.v1 import (
+    DocumentServiceStub,
+    Collection as CollectionMessage,
+    Key as KeyMessage,
+    Expression as ExpressionMessage,
+    ExpressionValue,
+    Document as DocumentMessage,
+)
+
+from nitric.utils import new_default_channel, _dict_from_struct, _struct_from_dict
+
+NIL_DOC_ID = ""
+
+
+class CollectionDepthException(Exception):
+    """The max depth of document sub-collections has been exceeded."""
+
+    pass
+
+
+@dataclass(frozen=True, order=True)
+class DocumentRef:
+    """A reference to a document in a collection."""
+
+    _documents: Documents
+    parent: CollectionRef
+    id: str
+
+    def collection(self, name: str) -> CollectionRef:
+        """
+        Return a reference to a sub-collection of this document.
+
+        This is currently only supported to one level of depth.
+        e.g. Documents().collection('a').doc('b').collection('c').doc('d') is valid,
+        Documents().collection('a').doc('b').collection('c').doc('d').collection('e') is invalid (1 level too deep).
+        """
+        current_depth = self.parent.sub_collection_depth()
+        if current_depth >= MAX_SUB_COLLECTION_DEPTH:
+            # Collection nesting is only supported to a maximum depth.
+            raise CollectionDepthException(
+                f"sub-collections supported to a depth of {MAX_SUB_COLLECTION_DEPTH}, "
+                f"attempted to create new collection with depth {current_depth + 1}"
+            )
+        return CollectionRef(_documents=self._documents, name=name, parent=self)
+
+    async def get(self) -> Document:
+        """Retrieve the contents of this document, if it exists."""
+        try:
+            response = await self._documents._stub.get(key=_doc_ref_to_wire(self))
+            return _document_from_wire(documents=self._documents, message=response.document)
+        except GRPCError as grpc_err:
+            raise exception_from_grpc_error(grpc_err)
+
+    async def set(self, content: dict):
+        """
+        Set the contents of this document.
+
+        If the document exists it will be updated, otherwise a new document will be created.
+        """
+        try:
+            await self._documents._stub.set(
+                key=_doc_ref_to_wire(self),
+                content=_struct_from_dict(content),
+            )
+        except GRPCError as grpc_err:
+            raise exception_from_grpc_error(grpc_err)
+
+    async def delete(self):
+        """Delete this document, if it exists."""
+        try:
+            await self._documents._stub.delete(
+                key=_doc_ref_to_wire(self),
+            )
+        except GRPCError as grpc_err:
+            raise exception_from_grpc_error(grpc_err)
+
+
+def _document_from_wire(documents: Documents, message: DocumentMessage) -> Document:
+    ref = _doc_ref_from_wire(documents=documents, message=message.key)
+
+    return Document(
+        _ref=ref,
+        content=_dict_from_struct(message.content),
+    )
+
+
+def _doc_ref_to_wire(ref: DocumentRef) -> KeyMessage:
+    return KeyMessage(id=ref.id, collection=_collection_to_wire(ref.parent))
+
+
+def _doc_ref_from_wire(documents: Documents, message: KeyMessage) -> DocumentRef:
+    return DocumentRef(
+        _documents=documents,
+        id=message.id,
+        parent=_collection_from_wire(documents=documents, message=message.collection),
+    )
+
+
+def _collection_to_wire(ref: CollectionRef) -> CollectionMessage:
+    if ref.is_sub_collection():
+        return CollectionMessage(name=ref.name, parent=_doc_ref_to_wire(ref.parent) if ref.parent else None)
+    return CollectionMessage(name=ref.name)
+
+
+def _collection_from_wire(documents: Documents, message: CollectionMessage) -> CollectionRef:
+    return CollectionRef(
+        _documents=documents,
+        name=message.name,
+        parent=_doc_ref_from_wire(documents=documents, message=message.parent) if message.parent else None,
+    )
+
+
+@dataclass(frozen=True, order=True)
+class CollectionRef:
+    """A reference to a collection of documents."""
+
+    _documents: Documents
+    name: str
+    parent: Union[DocumentRef, None] = field(default_factory=lambda: None)
+
+    def doc(self, doc_id: str) -> DocumentRef:
+        """Return a reference to a document in the collection."""
+        return DocumentRef(_documents=self._documents, parent=self, id=doc_id)
+
+    def collection(self, name: str) -> CollectionGroupRef:
+        """
+        Return a reference to a sub-collection within every document of this collection (a collection group).
+
+        This is currently only supported to one level of depth.
+        e.g. Documents().collection('a').collection('b') is valid,
+        Documents().collection('a').doc('b').collection('c').collection('d') is invalid (1 level too deep).
+        """
+        current_depth = self.sub_collection_depth()
+        if current_depth >= MAX_SUB_COLLECTION_DEPTH:
+            # Collection nesting is only supported to a maximum depth.
+            raise CollectionDepthException(
+                f"sub-collections supported to a depth of {MAX_SUB_COLLECTION_DEPTH}, "
+                f"attempted to create new collection with depth {current_depth + 1}"
+            )
+        return CollectionGroupRef(_documents=self._documents, name=name, parent=self)
+
+    def query(
+        self,
+        paging_token: Any = None,
+        limit: int = 0,
+        expressions: Union[Expression, List[Expression]] = None,
+    ) -> QueryBuilder:
+        """Return a query builder scoped to this collection."""
+        return QueryBuilder(
+            documents=self._documents,
+            collection=self,
+            paging_token=paging_token,
+            limit=limit,
+            expressions=[expressions] if isinstance(expressions, Expression) else expressions,
+        )
+
+    def sub_collection_depth(self) -> int:
+        """Return the depth of this collection, which is a count of the parents above this collection."""
+        if not self.is_sub_collection():
+            return 0
+        else:
+            return self.parent.parent.sub_collection_depth() + 1
+
+    def is_sub_collection(self):
+        """Return True if this collection is a sub-collection of a document in another collection."""
+        return self.parent is not None
+
+
+@dataclass(frozen=True, order=True)
+class CollectionGroupRef:
+    """A reference to a collection group."""
+
+    _documents: Documents
+    name: str
+    parent: Union[CollectionRef, None] = field(default_factory=lambda: None)
+
+    def query(
+        self,
+        paging_token: Any = None,
+        limit: int = 0,
+        expressions: Union[Expression, List[Expression]] = None,
+    ) -> QueryBuilder:
+        """Return a query builder scoped to this collection."""
+        return QueryBuilder(
+            documents=self._documents,
+            collection=self.to_collection_ref(),
+            paging_token=paging_token,
+            limit=limit,
+            expressions=[expressions] if isinstance(expressions, Expression) else expressions,
+        )
+
+    def sub_collection_depth(self) -> int:
+        """Return the depth of this collection group, which is a count of the parents above this collection."""
+        if not self.is_sub_collection():
+            return 0
+        else:
+            return self.parent.sub_collection_depth() + 1
+
+    def is_sub_collection(self):
+        """Return True if this collection is a sub-collection of a document in another collection."""
+        return self.parent is not None
+
+    def to_collection_ref(self):
+        """Return this collection group as a collection ref."""
+        return CollectionRef(
+            self._documents,
+            self.name,
+            DocumentRef(
+                self._documents,
+                self.parent,
+                NIL_DOC_ID,
+            ),
+        )
+
+    @staticmethod
+    def from_collection_ref(collectionRef: CollectionRef, documents: Documents) -> CollectionGroupRef:
+        """Return a collection ref as a collection group."""
+        if collectionRef.parent is not None:
+            return CollectionGroupRef(
+                documents,
+                collectionRef.name,
+                CollectionGroupRef.from_collection_ref(
+                    collectionRef.parent,
+                    documents,
+                ),
+            )
+
+
+class Operator(Enum):
+    """Valid query expression operators."""
+
+    less_than = "<"
+    greater_than = ">"
+    less_than_or_equal = "<="
+    greater_than_or_equal = ">="
+    equals = "=="
+    starts_with = "startsWith"
+
+
+class _ExpressionBuilder:
+    """Builder for creating query expressions using magic methods."""
+
+    def __init__(self, operand):
+        self._operand = operand
+
+    def __eq__(self, other) -> Expression:
+        return Expression(self._operand, Operator.equals, other)
+
+    def __lt__(self, other) -> Expression:
+        return Expression(self._operand, Operator.less_than, other)
+
+    def __le__(self, other) -> Expression:
+        return Expression(self._operand, Operator.less_than_or_equal, other)
+
+    def __gt__(self, other) -> Expression:
+        return Expression(self._operand, Operator.greater_than, other)
+
+    def __ge__(self, other) -> Expression:
+        return Expression(self._operand, Operator.greater_than_or_equal, other)
+
+    def eq(self, other) -> Expression:
+        return self == other
+
+    def lt(self, other) -> Expression:
+        return self < other
+
+    def le(self, other) -> Expression:
+        return self <= other
+
+    def gt(self, other) -> Expression:
+        return self > other
+
+    def ge(self, other) -> Expression:
+        return self >= other
+
+    def starts_with(self, match) -> Expression:
+        return Expression(self._operand, Operator.starts_with, match)
+
+
+def condition(name: str) -> _ExpressionBuilder:
+    """
+    Construct a query expression builder, for convenience.
+
+    Expression builders in turn provide magic methods for constructing expressions.
+
+    e.g. condition('first_name') == 'john' is equivalent to Expression('first_name', '==', 'john')
+
+    Supported operations are ==, <, >, <=, >=, .starts_with()
+    """
+    return _ExpressionBuilder(operand=name)
+
+
+@dataclass(order=True)
+class Expression:
+    """Query expressions, representing a boolean operation used for query filters."""
+
+    operand: str
+    operator: Union[Operator, str]
+    value: Union[str, int, float, bool]
+
+    def __post_init__(self):
+        if isinstance(self.operator, str):
+            # Convert string operators to their enum values
+            self.operator = Operator(self.operator)
+
+    def _value_to_expression_value(self):
+        """Return an ExpressionValue message representation of the value of this expression."""
+        if isinstance(self.value, str):
+            return ExpressionValue(string_value=self.value)
+        # Check bool before numbers, because booleans are numbers.
+        if isinstance(self.value, bool):
+            return ExpressionValue(bool_value=self.value)
+        if isinstance(self.value, int):
+            return ExpressionValue(int_value=self.value)
+        if isinstance(self.value, float):
+            return ExpressionValue(double_value=self.value)
+
+    def _to_wire(self):
+        """Return the Expression protobuf message representation of this expression."""
+        return ExpressionMessage(
+            operand=self.operand,
+            operator=self.operator.value,
+            value=self._value_to_expression_value(),
+        )
+
+    def __str__(self):
+        return "{0} {1} {2}".format(self.operand, self.operator.name, self.value)
+
+
+@dataclass(frozen=True, order=True)
+class Document:
+    """Represents a document and any associated metadata."""
+
+    _ref: DocumentRef
+    content: dict
+
+    @property
+    def id(self):
+        """Return the document's unique id."""
+        return self._ref.id
+
+    @property
+    def collection(self) -> CollectionRef:
+        """Return the CollectionRef for the collection that contains this document."""
+        return self._ref.parent
+
+    @property
+    def ref(self):
+        """Return the DocumentRef for this document."""
+        return self._ref
+
+
+@dataclass(frozen=True, order=True)
+class QueryResultsPage:
+    """Represents a page of results from a query."""
+
+    paging_token: any = field(default_factory=lambda: None)
+    documents: List[Document] = field(default_factory=lambda: [])
+
+    def has_more_pages(self) -> bool:
+        """Return false if the page token is None or empty (both represent no more pages)."""
+        return bool(self.paging_token)
+
+
+class QueryBuilder:
+    """Document query builder for retrieving documents from a collection based on filters."""
+
+    _documents: Documents
+    _collection: CollectionRef
+    _paging_token: Any
+    _limit: int
+    _expressions: List[Expression]
+
+    def __init__(
+        self,
+        documents: Documents,
+        collection: CollectionRef,
+        paging_token: Any = None,
+        limit: int = 0,
+        expressions: List[Expression] = None,
+    ):
+        """Construct a new QueryBuilder."""
+        self._documents = documents
+        self._collection = collection
+        self._paging_token = paging_token
+        self._limit = limit  # default to unlimited.
+        if expressions is None:
+            self._expressions = []
+        else:
+            self._expressions = expressions
+
+    def _flat_expressions(self, expressions) -> List[Expression]:
+        """Process possible inputs for .where() into a flattened list of expressions."""
+        if isinstance(expressions, tuple) and len(expressions) == 3 and isinstance(expressions[0], str):
+            # handle the special case where an expression was passed in as its component arguments.
+            # e.g. .where('age', '<', 30) instead of .where(condition('age') < 30)
+            return [Expression(*expressions)]
+        if isinstance(expressions, Expression):
+            # when a single expression is received, wrap in a list and return it
+            return [expressions]
+        else:
+            # flatten lists of lists into single dimension list of expressions
+            exps = []
+            for exp in expressions:
+                exps = exps + self._flat_expressions(exp)
+            return exps
+
+    def where(
+        self,
+        *expressions: Union[
+            Expression, List[Expression], Union[str, Operator, int, bool, Tuple[str, Union[str, Operator], Any]]
+        ],
+    ) -> QueryBuilder:
+        """
+        Add a filter expression to the query.
+
+        :param expressions: a single expression or a set of expression args or a variadic/tuple/list of expressions.
+
+        Examples
+        --------
+            .where('age', '>', 20)
+            .where(condition('age') > 20)
+            .where(condition('age').gt(20))
+            .where(
+                condition('age') > 20,
+                condition('age') < 50,
+            )
+            .where(
+                [
+                    condition('age') > 20,
+                    condition('age') < 50,
+                ]
+            )
+            .where(
+                ('age', '>', 20),
+                ('age', '<', 50),
+            )
+
+        """
+        for expression in self._flat_expressions(expressions):
+            self._expressions.append(expression)
+        return self
+
+    def page_from(self, token) -> QueryBuilder:
+        """
+        Set the paging token for the query.
+
+        Used when requesting subsequent pages from a query.
+        """
+        self._paging_token = token
+        return self
+
+    def limit(self, limit: int) -> QueryBuilder:
+        """Set the maximum number of results returned by this query."""
+        if limit is None or not isinstance(limit, int) or limit < 0:
+            raise ValueError("limit must be a positive integer or 0 for unlimited.")
+        self._limit = limit
+        return self
+
+    def _expressions_to_wire(self) -> List[ExpressionMessage]:
+        """Return this queries' expressions as a list of their protobuf message representation."""
+        return [expressions._to_wire() for expressions in self._expressions]
+
+    async def stream(self) -> AsyncIterator[Document]:
+        """Return all query results as a stream."""
+        # TODO: add limit, expressions and paging token to query.
+        if self._paging_token is not None:
+            raise ValueError("page_from() should not be used with streamed queries.")
+
+        try:
+            async for result in self._documents._stub.query_stream(
+                collection=_collection_to_wire(self._collection),
+                expressions=self._expressions_to_wire(),
+                limit=self._limit,
+            ):
+                yield _document_from_wire(documents=self._documents, message=result.document)
+        except GRPCError as grpc_err:
+            raise exception_from_grpc_error(grpc_err)
+
+    async def fetch(self) -> QueryResultsPage:
+        """
+        Fetch a single page of results.
+
+        If a page has been fetched previously, a token can be provided via page_from() to fetch subsequent pages.
+        """
+        try:
+            results = await self._documents._stub.query(
+                collection=_collection_to_wire(self._collection),
+                expressions=self._expressions_to_wire(),
+                limit=self._limit,
+                paging_token=self._paging_token,
+            )
+
+            return QueryResultsPage(
+                paging_token=results.paging_token if results.paging_token else None,
+                documents=[
+                    _document_from_wire(documents=self._documents, message=result) for result in results.documents
+                ],
+            )
+        except GRPCError as grpc_err:
+            raise exception_from_grpc_error(grpc_err)
+
+    def __eq__(self, other):
+        return self.__repr__() == other.__repr__()
+
+    def __str__(self):
+        repr_str = "from {0}".format(str(self._collection))
+        if self._paging_token:
+            repr_str += ", paging token {0}".format(str(self._paging_token))
+        if len(self._expressions):
+            repr_str += ", where " + " and ".join([str(exp) for exp in self._expressions])
+        if self._limit != 1:
+            repr_str += ", limit to {0} results".format(self._limit)
+
+        return "Query({0})".format(repr_str)
+
+    def __repr__(self):
+        repr_str = "Documents.collection({0}).query()".format(self._collection)
+        if self._paging_token:
+            repr_str += ".page_from({0})".format(self._paging_token)
+        if len(self._expressions):
+            repr_str += "".join([".where({0})".format(str(exp)) for exp in self._expressions])
+        if self._limit != 1:
+            repr_str += ".limit({0})".format(self._limit)
+
+        return repr_str
+
+
+class Documents(object):
+    """
+    Nitric client for interacting with document collections.
+
+    This client insulates application code from stack-specific document operations or SDKs.
+    """
+
+    _stub: DocumentServiceStub
+
+    def __init__(self):
+        """Construct a Nitric Document Client."""
+        self._channel = new_default_channel()
+        self._stub = DocumentServiceStub(channel=self._channel)
+
+    def __del__(self):
+        # close the channel when this client is destroyed
+        if self._channel is not None:
+            self._channel.close()
+
+    def collection(self, name: str) -> CollectionRef:
+        """Return a reference to a document collection."""
+        return CollectionRef(_documents=self, name=name)
+
+
+
+
+
+
+
+

Functions

+
+
+def condition(name: str) ‑> nitric.api.documents._ExpressionBuilder +
+
+

Construct a query expression builder, for convenience.

+

Expression builders in turn provide magic methods for constructing expressions.

+

e.g. condition('first_name') == 'john' is equivalent to Expression('first_name', '==', 'john')

+

Supported operations are ==, <, >, <=, >=, .starts_with()

+
+ +Expand source code + +
def condition(name: str) -> _ExpressionBuilder:
+    """
+    Construct a query expression builder, for convenience.
+
+    Expression builders in turn provide magic methods for constructing expressions.
+
+    e.g. condition('first_name') == 'john' is equivalent to Expression('first_name', '==', 'john')
+
+    Supported operations are ==, <, >, <=, >=, .starts_with()
+    """
+    return _ExpressionBuilder(operand=name)
+
+
+
+
+
+
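As a quick illustration of the builder described above (a hedged sketch; the field names and values are invented, and no server connection is needed just to construct expressions):

from nitric.api.documents import condition

# Comparisons on a condition() builder each return an Expression.
adult = condition("age") >= 18                     # same as Expression("age", ">=", 18)
prefix = condition("last_name").starts_with("Ma")  # prefix match

print(adult)   # e.g. "age greater_than_or_equal 18"
print(prefix)  # e.g. "last_name starts_with Ma"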

Classes

+
+
+class CollectionDepthException +(*args, **kwargs) +
+
+

The max depth of document sub-collections has been exceeded.

+
+ +Expand source code + +
class CollectionDepthException(Exception):
+    """The max depth of document sub-collections has been exceeded."""
+
+    pass
+
+

Ancestors

+
    +
  • builtins.Exception
  • +
  • builtins.BaseException
  • +
+
+
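A minimal sketch of when this exception is raised (collection and document names are hypothetical); because the depth check runs client-side, it fails immediately without contacting the Nitric membrane:

import asyncio
from nitric.api.documents import Documents, CollectionDepthException

docs = Documents()

# One level of sub-collection is allowed...
orders = docs.collection("stores").doc("store-1").collection("orders")

# ...but nesting another level below a sub-collection document raises.
try:
    orders.doc("order-1").collection("items")
except CollectionDepthException as e:
    print("too deep:", e)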
+class CollectionGroupRef +(_documents: Documents, name: str, parent: Union[CollectionRef, None] = <factory>) +
+
+

A reference to a collection group.

+
+ +Expand source code + +
class CollectionGroupRef:
+    """A reference to a collection group."""
+
+    _documents: Documents
+    name: str
+    parent: Union[CollectionRef, None] = field(default_factory=lambda: None)
+
+    def query(
+        self,
+        paging_token: Any = None,
+        limit: int = 0,
+        expressions: Union[Expression, List[Expression]] = None,
+    ) -> QueryBuilder:
+        """Return a query builder scoped to this collection."""
+        return QueryBuilder(
+            documents=self._documents,
+            collection=self.to_collection_ref(),
+            paging_token=paging_token,
+            limit=limit,
+            expressions=[expressions] if isinstance(expressions, Expression) else expressions,
+        )
+
+    def sub_collection_depth(self) -> int:
+        """Return the depth of this collection group, which is a count of the parents above this collection."""
+        if not self.is_sub_collection():
+            return 0
+        else:
+            return self.parent.sub_collection_depth() + 1
+
+    def is_sub_collection(self):
+        """Return True if this collection is a sub-collection of a document in another collection."""
+        return self.parent is not None
+
+    def to_collection_ref(self):
+        """Return this collection group as a collection ref."""
+        return CollectionRef(
+            self._documents,
+            self.name,
+            DocumentRef(
+                self._documents,
+                self.parent,
+                NIL_DOC_ID,
+            ),
+        )
+
+    @staticmethod
+    def from_collection_ref(collectionRef: CollectionRef, documents: Documents) -> CollectionGroupRef:
+        """Return a collection ref as a collection group."""
+        if collectionRef.parent is not None:
+            return CollectionGroupRef(
+                documents,
+                collectionRef.name,
+                CollectionGroupRef.from_collection_ref(
+                    collectionRef.parent,
+                    documents,
+                ),
+            )
+
+

Class variables

+
+
var name : str
+
+
+
+
var parent : Union[CollectionRef, NoneType]
+
+
+
+
+

Static methods

+
+
+def from_collection_ref(collectionRef: CollectionRef, documents: Documents) ‑> CollectionGroupRef +
+
+

Return a collection ref as a collection group.

+
+ +Expand source code + +
@staticmethod
+def from_collection_ref(collectionRef: CollectionRef, documents: Documents) -> CollectionGroupRef:
+    """Return a collection ref as a collection group."""
+    if collectionRef.parent is not None:
+        return CollectionGroupRef(
+            documents,
+            collectionRef.name,
+            CollectionGroupRef.from_collection_ref(
+                collectionRef.parent,
+                documents,
+            ),
+        )
+
+
+
+

Methods

+
+
+def is_sub_collection(self) +
+
+

Return True if this collection is a sub-collection of a document in another collection.

+
+ +Expand source code + +
def is_sub_collection(self):
+    """Return True if this collection is a sub-collection of a document in another collection."""
+    return self.parent is not None
+
+
+
+def query(self, paging_token: Any = None, limit: int = 0, expressions: Union[Expression, List[Expression]] = None) ‑> QueryBuilder +
+
+

Return a query builder scoped to this collection.

+
+ +Expand source code + +
def query(
+    self,
+    paging_token: Any = None,
+    limit: int = 0,
+    expressions: Union[Expression, List[Expression]] = None,
+) -> QueryBuilder:
+    """Return a query builder scoped to this collection."""
+    return QueryBuilder(
+        documents=self._documents,
+        collection=self.to_collection_ref(),
+        paging_token=paging_token,
+        limit=limit,
+        expressions=[expressions] if isinstance(expressions, Expression) else expressions,
+    )
+
+
+
+def sub_collection_depth(self) ‑> int +
+
+

Return the depth of this collection group, which is a count of the parents above this collection.

+
+ +Expand source code + +
def sub_collection_depth(self) -> int:
+    """Return the depth of this collection group, which is a count of the parents above this collection."""
+    if not self.is_sub_collection():
+        return 0
+    else:
+        return self.parent.sub_collection_depth() + 1
+
+
+
+def to_collection_ref(self) +
+
+

Return this collection group as a collection ref.

+
+ +Expand source code + +
def to_collection_ref(self):
+    """Return this collection group as a collection ref."""
+    return CollectionRef(
+        self._documents,
+        self.name,
+        DocumentRef(
+            self._documents,
+            self.parent,
+            NIL_DOC_ID,
+        ),
+    )
+
+
+
+
+
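For context, a hedged sketch of querying a collection group across every parent document (the names are invented and a running Nitric membrane is assumed to serve the stream):

import asyncio
from nitric.api.documents import Documents

async def list_all_orders():
    docs = Documents()
    # collection() on a CollectionRef yields a CollectionGroupRef,
    # so this query spans the "orders" sub-collection of every store.
    orders_group = docs.collection("stores").collection("orders")
    async for order in orders_group.query().stream():
        print(order.id, order.content)

asyncio.run(list_all_orders())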
+class CollectionRef +(_documents: Documents, name: str, parent: Union[DocumentRef, None] = <factory>) +
+
+

A reference to a collection of documents.

+
+ +Expand source code + +
class CollectionRef:
+    """A reference to a collection of documents."""
+
+    _documents: Documents
+    name: str
+    parent: Union[DocumentRef, None] = field(default_factory=lambda: None)
+
+    def doc(self, doc_id: str) -> DocumentRef:
+        """Return a reference to a document in the collection."""
+        return DocumentRef(_documents=self._documents, parent=self, id=doc_id)
+
+    def collection(self, name: str) -> CollectionGroupRef:
+        """
+        Return a reference to a sub-collection within every document of this collection (a collection group).
+
+        This is currently only supported to one level of depth.
+        e.g. Documents().collection('a').collection('b') is valid,
+        Documents().collection('a').doc('b').collection('c').collection('d') is invalid (1 level too deep).
+        """
+        current_depth = self.sub_collection_depth()
+        if current_depth >= MAX_SUB_COLLECTION_DEPTH:
+            # Collection nesting is only supported to a maximum depth.
+            raise CollectionDepthException(
+                f"sub-collections supported to a depth of {MAX_SUB_COLLECTION_DEPTH}, "
+                f"attempted to create new collection with depth {current_depth + 1}"
+            )
+        return CollectionGroupRef(_documents=self._documents, name=name, parent=self)
+
+    def query(
+        self,
+        paging_token: Any = None,
+        limit: int = 0,
+        expressions: Union[Expression, List[Expression]] = None,
+    ) -> QueryBuilder:
+        """Return a query builder scoped to this collection."""
+        return QueryBuilder(
+            documents=self._documents,
+            collection=self,
+            paging_token=paging_token,
+            limit=limit,
+            expressions=[expressions] if isinstance(expressions, Expression) else expressions,
+        )
+
+    def sub_collection_depth(self) -> int:
+        """Return the depth of this collection, which is a count of the parents above this collection."""
+        if not self.is_sub_collection():
+            return 0
+        else:
+            return self.parent.parent.sub_collection_depth() + 1
+
+    def is_sub_collection(self):
+        """Return True if this collection is a sub-collection of a document in another collection."""
+        return self.parent is not None
+
+

Class variables

+
+
var name : str
+
+
+
+
var parent : Union[DocumentRef, NoneType]
+
+
+
+
+

Methods

+
+
+def collection(self, name: str) ‑> CollectionGroupRef +
+
+

Return a reference to a sub-collection within every document of this collection (a collection group).

+

This is currently only supported to one level of depth. +e.g. Documents().collection('a').collection('b') is valid, +Documents().collection('a').doc('b').collection('c').collection('d') is invalid (1 level too deep).

+
+ +Expand source code + +
def collection(self, name: str) -> CollectionGroupRef:
+    """
+    Return a reference to a sub-collection within every document of this collection (a collection group).
+
+    This is currently only supported to one level of depth.
+    e.g. Documents().collection('a').collection('b') is valid,
+    Documents().collection('a').doc('b').collection('c').collection('d') is invalid (1 level too deep).
+    """
+    current_depth = self.sub_collection_depth()
+    if current_depth >= MAX_SUB_COLLECTION_DEPTH:
+        # Collection nesting is only supported to a maximum depth.
+        raise CollectionDepthException(
+            f"sub-collections supported to a depth of {MAX_SUB_COLLECTION_DEPTH}, "
+            f"attempted to create new collection with depth {current_depth + 1}"
+        )
+    return CollectionGroupRef(_documents=self._documents, name=name, parent=self)
+
+
+
+def doc(self, doc_id: str) ‑> DocumentRef +
+
+

Return a reference to a document in the collection.

+
+ +Expand source code + +
def doc(self, doc_id: str) -> DocumentRef:
+    """Return a reference to a document in the collection."""
+    return DocumentRef(_documents=self._documents, parent=self, id=doc_id)
+
+
+
+def is_sub_collection(self) +
+
+

Return True if this collection is a sub-collection of a document in another collection.

+
+ +Expand source code + +
def is_sub_collection(self):
+    """Return True if this collection is a sub-collection of a document in another collection."""
+    return self.parent is not None
+
+
+
+def query(self, paging_token: Any = None, limit: int = 0, expressions: Union[Expression, List[Expression]] = None) ‑> QueryBuilder +
+
+

Return a query builder scoped to this collection.

+
+ +Expand source code + +
def query(
+    self,
+    paging_token: Any = None,
+    limit: int = 0,
+    expressions: Union[Expression, List[Expression]] = None,
+) -> QueryBuilder:
+    """Return a query builder scoped to this collection."""
+    return QueryBuilder(
+        documents=self._documents,
+        collection=self,
+        paging_token=paging_token,
+        limit=limit,
+        expressions=[expressions] if isinstance(expressions, Expression) else expressions,
+    )
+
+
+
+def sub_collection_depth(self) ‑> int +
+
+

Return the depth of this collection, which is a count of the parents above this collection.

+
+ +Expand source code + +
def sub_collection_depth(self) -> int:
+    """Return the depth of this collection, which is a count of the parents above this collection."""
+    if not self.is_sub_collection():
+        return 0
+    else:
+        return self.parent.parent.sub_collection_depth() + 1
+
+
+
+
+
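To show how the reference methods above chain together, a brief sketch (collection, document, and field names are hypothetical; nothing is sent to the server until a query or document operation is awaited):

from nitric.api.documents import Documents, condition

docs = Documents()
users = docs.collection("users")

# A document in the collection, and a sub-collection of that document.
user = users.doc("user-123")
posts = user.collection("posts")

# A filtered, size-limited query over the collection (not yet executed).
recent_adults = users.query().where(condition("age") >= 18).limit(10)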
+class Document +(_ref: DocumentRef, content: dict) +
+
+

Represents a document and any associated metadata.

+
+ +Expand source code + +
class Document:
+    """Represents a document and any associated metadata."""
+
+    _ref: DocumentRef
+    content: dict
+
+    @property
+    def id(self):
+        """Return the document's unique id."""
+        return self._ref.id
+
+    @property
+    def collection(self) -> CollectionRef:
+        """Return the CollectionRef for the collection that contains this document."""
+        return self._ref.parent
+
+    @property
+    def ref(self):
+        """Return the DocumentRef for this document."""
+        return self._ref
+
+

Class variables

+
+
var content : dict
+
+
+
+
+

Instance variables

+
+
var collectionCollectionRef
+
+

Return the CollectionRef for the collection that contains this document.

+
+ +Expand source code + +
@property
+def collection(self) -> CollectionRef:
+    """Return the CollectionRef for the collection that contains this document."""
+    return self._ref.parent
+
+
+
var id
+
+

Return the document's unique id.

+
+ +Expand source code + +
@property
+def id(self):
+    """Return the document's unique id."""
+    return self._ref.id
+
+
+
var ref
+
+

Return the DocumentRef for this document.

+
+ +Expand source code + +
@property
+def ref(self):
+    """Return the DocumentRef for this document."""
+    return self._ref
+
+
+
+
+
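A short hedged sketch of working with a retrieved Document (the key is made up and a reachable Nitric membrane is assumed):

import asyncio
from nitric.api.documents import Documents

async def show_profile():
    docs = Documents()
    profile = await docs.collection("profiles").doc("user-123").get()
    print(profile.id)          # unique id of the document
    print(profile.content)     # the stored dictionary
    print(profile.collection)  # CollectionRef containing the document
    # The ref can be reused for follow-up operations on the same document.
    await profile.ref.set({**profile.content, "verified": True})

asyncio.run(show_profile())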
+class DocumentRef +(_documents: Documents, parent: CollectionRef, id: str) +
+
+

A reference to a document in a collection.

+
+ +Expand source code + +
class DocumentRef:
+    """A reference to a document in a collection."""
+
+    _documents: Documents
+    parent: CollectionRef
+    id: str
+
+    def collection(self, name: str) -> CollectionRef:
+        """
+        Return a reference to a sub-collection of this document.
+
+        This is currently only supported to one level of depth.
+        e.g. Documents().collection('a').doc('b').collection('c').doc('d') is valid,
+        Documents().collection('a').doc('b').collection('c').doc('d').collection('e') is invalid (1 level too deep).
+        """
+        current_depth = self.parent.sub_collection_depth()
+        if current_depth >= MAX_SUB_COLLECTION_DEPTH:
+            # Collection nesting is only supported to a maximum depth.
+            raise CollectionDepthException(
+                f"sub-collections supported to a depth of {MAX_SUB_COLLECTION_DEPTH}, "
+                f"attempted to create new collection with depth {current_depth + 1}"
+            )
+        return CollectionRef(_documents=self._documents, name=name, parent=self)
+
+    async def get(self) -> Document:
+        """Retrieve the contents of this document, if it exists."""
+        try:
+            response = await self._documents._stub.get(key=_doc_ref_to_wire(self))
+            return _document_from_wire(documents=self._documents, message=response.document)
+        except GRPCError as grpc_err:
+            raise exception_from_grpc_error(grpc_err)
+
+    async def set(self, content: dict):
+        """
+        Set the contents of this document.
+
+        If the document exists it will be updated, otherwise a new document will be created.
+        """
+        try:
+            await self._documents._stub.set(
+                key=_doc_ref_to_wire(self),
+                content=_struct_from_dict(content),
+            )
+        except GRPCError as grpc_err:
+            raise exception_from_grpc_error(grpc_err)
+
+    async def delete(self):
+        """Delete this document, if it exists."""
+        try:
+            await self._documents._stub.delete(
+                key=_doc_ref_to_wire(self),
+            )
+        except GRPCError as grpc_err:
+            raise exception_from_grpc_error(grpc_err)
+
+

Class variables

+
+
var id : str
+
+
+
+
var parentCollectionRef
+
+
+
+
+

Methods

+
+
+def collection(self, name: str) ‑> CollectionRef +
+
+

Return a reference to a sub-collection of this document.

+

This is currently only supported to one level of depth. +e.g. Documents().collection('a').doc('b').collection('c').doc('d') is valid, +Documents().collection('a').doc('b').collection('c').doc('d').collection('e') is invalid (1 level too deep).

+
+ +Expand source code + +
def collection(self, name: str) -> CollectionRef:
+    """
+    Return a reference to a sub-collection of this document.
+
+    This is currently only supported to one level of depth.
+    e.g. Documents().collection('a').doc('b').collection('c').doc('d') is valid,
+    Documents().collection('a').doc('b').collection('c').doc('d').collection('e') is invalid (1 level too deep).
+    """
+    current_depth = self.parent.sub_collection_depth()
+    if current_depth >= MAX_SUB_COLLECTION_DEPTH:
+        # Collection nesting is only supported to a maximum depth.
+        raise CollectionDepthException(
+            f"sub-collections supported to a depth of {MAX_SUB_COLLECTION_DEPTH}, "
+            f"attempted to create new collection with depth {current_depth + 1}"
+        )
+    return CollectionRef(_documents=self._documents, name=name, parent=self)
+
+
+
+async def delete(self) +
+
+

Delete this document, if it exists.

+
+ +Expand source code + +
async def delete(self):
+    """Delete this document, if it exists."""
+    try:
+        await self._documents._stub.delete(
+            key=_doc_ref_to_wire(self),
+        )
+    except GRPCError as grpc_err:
+        raise exception_from_grpc_error(grpc_err)
+
+
+
+async def get(self) ‑> Document +
+
+

Retrieve the contents of this document, if it exists.

+
+ +Expand source code + +
async def get(self) -> Document:
+    """Retrieve the contents of this document, if it exists."""
+    try:
+        response = await self._documents._stub.get(key=_doc_ref_to_wire(self))
+        return _document_from_wire(documents=self._documents, message=response.document)
+    except GRPCError as grpc_err:
+        raise exception_from_grpc_error(grpc_err)
+
+
+
+async def set(self, content: dict) +
+
+

Set the contents of this document.

+

If the document exists it will be updated, otherwise a new document will be created.

+
+ +Expand source code + +
async def set(self, content: dict):
+    """
+    Set the contents of this document.
+
+    If the document exists it will be updated, otherwise a new document will be created.
+    """
+    try:
+        await self._documents._stub.set(
+            key=_doc_ref_to_wire(self),
+            content=_struct_from_dict(content),
+        )
+    except GRPCError as grpc_err:
+        raise exception_from_grpc_error(grpc_err)
+
+
+
+
+
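Tying the methods above together, a hedged end-to-end sketch (names and content are invented; each awaited call requires a running Nitric membrane):

import asyncio
from nitric.api.documents import Documents

async def crud_example():
    docs = Documents()
    ref = docs.collection("customers").doc("cust-001")

    # Create or replace the document's content.
    await ref.set({"name": "Jane", "active": True})

    # Read it back.
    customer = await ref.get()
    print(customer.content)

    # Remove it again.
    await ref.delete()

asyncio.run(crud_example())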
+class Documents +
+
+

Nitric client for interacting with document collections.

+

This client insulates application code from stack-specific document operations or SDKs.

+

Construct a Nitric Document Client.

+
+ +Expand source code + +
class Documents(object):
+    """
+    Nitric client for interacting with document collections.
+
+    This client insulates application code from stack-specific document operations or SDKs.
+    """
+
+    _stub: DocumentServiceStub
+
+    def __init__(self):
+        """Construct a Nitric Document Client."""
+        self._channel = new_default_channel()
+        self._stub = DocumentServiceStub(channel=self._channel)
+
+    def __del__(self):
+        # close the channel when this client is destroyed
+        if self._channel is not None:
+            self._channel.close()
+
+    def collection(self, name: str) -> CollectionRef:
+        """Return a reference to a document collection."""
+        return CollectionRef(_documents=self, name=name)
+
+

Methods

+
+
+def collection(self, name: str) ‑> CollectionRef +
+
+

Return a reference to a document collection.

+
+ +Expand source code + +
def collection(self, name: str) -> CollectionRef:
+    """Return a reference to a document collection."""
+    return CollectionRef(_documents=self, name=name)
+
+
+
+
+
+class Expression +(operand: str, operator: Union[Operator, str], value: Union[str, int, float, bool]) +
+
+

Query expressions, representing a boolean operation used for query filters.

+
+ +Expand source code + +
class Expression:
+    """Query expressions, representing a boolean operation used for query filters."""
+
+    operand: str
+    operator: Union[Operator, str]
+    value: Union[str, int, float, bool]
+
+    def __post_init__(self):
+        if isinstance(self.operator, str):
+            # Convert string operators to their enum values
+            self.operator = Operator(self.operator)
+
+    def _value_to_expression_value(self):
+        """Return an ExpressionValue message representation of the value of this expression."""
+        if isinstance(self.value, str):
+            return ExpressionValue(string_value=self.value)
+        # Check bool before numbers, because booleans are numbers.
+        if isinstance(self.value, bool):
+            return ExpressionValue(bool_value=self.value)
+        if isinstance(self.value, int):
+            return ExpressionValue(int_value=self.value)
+        if isinstance(self.value, float):
+            return ExpressionValue(double_value=self.value)
+
+    def _to_wire(self):
+        """Return the Expression protobuf message representation of this expression."""
+        return ExpressionMessage(
+            operand=self.operand,
+            operator=self.operator.value,
+            value=self._value_to_expression_value(),
+        )
+
+    def __str__(self):
+        return "{0} {1} {2}".format(self.operand, self.operator.name, self.value)
+
+

Class variables

+
+
var operand : str
+
+
+
+
var operator : Union[Operator, str]
+
+
+
+
var value : Union[str, int, float, bool]
+
+
+
+
+
+
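To illustrate the operator conversion performed in __post_init__ (a small local sketch, no server required):

from nitric.api.documents import Expression, Operator

# A string operator is converted to its Operator enum member on construction.
exp = Expression("age", ">=", 21)
assert exp.operator is Operator.greater_than_or_equal

# Equivalent, using the enum directly.
same = Expression("age", Operator.greater_than_or_equal, 21)
assert exp == same
print(exp)  # e.g. "age greater_than_or_equal 21"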
+class Operator +(value, names=None, *, module=None, qualname=None, type=None, start=1) +
+
+

Valid query expression operators.

+
+ +Expand source code + +
class Operator(Enum):
+    """Valid query expression operators."""
+
+    less_than = "<"
+    greater_than = ">"
+    less_than_or_equal = "<="
+    greater_than_or_equal = ">="
+    equals = "=="
+    starts_with = "startsWith"
+
+

Ancestors

+
    +
  • enum.Enum
  • +
+

Class variables

+
+
var equals
+
+
+
+
var greater_than
+
+
+
+
var greater_than_or_equal
+
+
+
+
var less_than
+
+
+
+
var less_than_or_equal
+
+
+
+
var starts_with
+
+
+
+
+
+
+class QueryBuilder +(documents: Documents, collection: CollectionRef, paging_token: Any = None, limit: int = 0, expressions: List[Expression] = None) +
+
+

Document query builder for retrieving documents from a collection based on filters.

+

Construct a new QueryBuilder.

+
+ +Expand source code + +
class QueryBuilder:
+    """Document query builder for retrieving documents from a collection based on filters."""
+
+    _documents: Documents
+    _collection: CollectionRef
+    _paging_token: Any
+    _limit: int
+    _expressions: List[Expression]
+
+    def __init__(
+        self,
+        documents: Documents,
+        collection: CollectionRef,
+        paging_token: Any = None,
+        limit: int = 0,
+        expressions: List[Expression] = None,
+    ):
+        """Construct a new QueryBuilder."""
+        self._documents = documents
+        self._collection = collection
+        self._paging_token = paging_token
+        self._limit = limit  # default to unlimited.
+        if expressions is None:
+            self._expressions = []
+        else:
+            self._expressions = expressions
+
+    def _flat_expressions(self, expressions) -> List[Expression]:
+        """Process possible inputs for .where() into a flattened list of expressions."""
+        if isinstance(expressions, tuple) and len(expressions) == 3 and isinstance(expressions[0], str):
+            # handle the special case where an expression was passed in as its component arguments.
+            # e.g. .where('age', '<', 30) instead of .where(condition('age') < 30)
+            return [Expression(*expressions)]
+        if isinstance(expressions, Expression):
+            # when a single expression is received, wrap in a list and return it
+            return [expressions]
+        else:
+            # flatten lists of lists into single dimension list of expressions
+            exps = []
+            for exp in expressions:
+                exps = exps + self._flat_expressions(exp)
+            return exps
+
+    def where(
+        self,
+        *expressions: Union[
+            Expression, List[Expression], Union[str, Operator, int, bool, Tuple[str, Union[str, Operator], Any]]
+        ],
+    ) -> QueryBuilder:
+        """
+        Add a filter expression to the query.
+
+        :param expressions: a single expression or a set of expression args or a variadic/tuple/list of expressions.
+
+        Examples
+        --------
+            .where('age', '>', 20)
+            .where(condition('age') > 20)
+            .where(condition('age').gt(20))
+            .where(
+                condition('age') > 20,
+                condition('age') < 50,
+            )
+            .where(
+                [
+                    condition('age') > 20,
+                    condition('age') < 50,
+                ]
+            )
+            .where(
+                ('age', '>', 20),
+                ('age', '<', 50),
+            )
+
+        """
+        for expression in self._flat_expressions(expressions):
+            self._expressions.append(expression)
+        return self
+
+    def page_from(self, token) -> QueryBuilder:
+        """
+        Set the paging token for the query.
+
+        Used when requesting subsequent pages from a query.
+        """
+        self._paging_token = token
+        return self
+
+    def limit(self, limit: int) -> QueryBuilder:
+        """Set the maximum number of results returned by this query."""
+        if limit is None or not isinstance(limit, int) or limit < 0:
+            raise ValueError("limit must be a positive integer or 0 for unlimited.")
+        self._limit = limit
+        return self
+
+    def _expressions_to_wire(self) -> List[ExpressionMessage]:
+        """Return this queries' expressions as a list of their protobuf message representation."""
+        return [expressions._to_wire() for expressions in self._expressions]
+
+    async def stream(self) -> AsyncIterator[Document]:
+        """Return all query results as a stream."""
+        # TODO: add limit, expressions and paging token to query.
+        if self._paging_token is not None:
+            raise ValueError("page_from() should not be used with streamed queries.")
+
+        try:
+            async for result in self._documents._stub.query_stream(
+                collection=_collection_to_wire(self._collection),
+                expressions=self._expressions_to_wire(),
+                limit=self._limit,
+            ):
+                yield _document_from_wire(documents=self._documents, message=result.document)
+        except GRPCError as grpc_err:
+            raise exception_from_grpc_error(grpc_err)
+
+    async def fetch(self) -> QueryResultsPage:
+        """
+        Fetch a single page of results.
+
+        If a page has been fetched previously, a token can be provided via page_from() to fetch subsequent pages.
+        """
+        try:
+            results = await self._documents._stub.query(
+                collection=_collection_to_wire(self._collection),
+                expressions=self._expressions_to_wire(),
+                limit=self._limit,
+                paging_token=self._paging_token,
+            )
+
+            return QueryResultsPage(
+                paging_token=results.paging_token if results.paging_token else None,
+                documents=[
+                    _document_from_wire(documents=self._documents, message=result) for result in results.documents
+                ],
+            )
+        except GRPCError as grpc_err:
+            raise exception_from_grpc_error(grpc_err)
+
+    def __eq__(self, other):
+        return self.__repr__() == other.__repr__()
+
+    def __str__(self):
+        repr_str = "from {0}".format(str(self._collection))
+        if self._paging_token:
+            repr_str += ", paging token {0}".format(str(self._paging_token))
+        if len(self._expressions):
+            repr_str += ", where " + " and ".join([str(exp) for exp in self._expressions])
+        if self._limit != 1:
+            repr_str += ", limit to {0} results".format(self._limit)
+
+        return "Query({0})".format(repr_str)
+
+    def __repr__(self):
+        repr_str = "Documents.collection({0}).query()".format(self._collection)
+        if self._paging_token:
+            repr_str += ".page_from({0})".format(self._paging_token)
+        if len(self._expressions):
+            repr_str += "".join([".where({0})".format(str(exp)) for exp in self._expressions])
+        if self._limit != 1:
+            repr_str += ".limit({0})".format(self._limit)
+
+        return repr_str
+
+

Methods

+
+
+async def fetch(self) ‑> QueryResultsPage +
+
+

Fetch a single page of results.

+

If a page has been fetched previously, a token can be provided via page_from() to fetch subsequent pages.

+
+ +Expand source code + +
async def fetch(self) -> QueryResultsPage:
+    """
+    Fetch a single page of results.
+
+    If a page has been fetched previously, a token can be provided via page_from() to fetch subsequent pages.
+    """
+    try:
+        results = await self._documents._stub.query(
+            collection=_collection_to_wire(self._collection),
+            expressions=self._expressions_to_wire(),
+            limit=self._limit,
+            paging_token=self._paging_token,
+        )
+
+        return QueryResultsPage(
+            paging_token=results.paging_token if results.paging_token else None,
+            documents=[
+                _document_from_wire(documents=self._documents, message=result) for result in results.documents
+            ],
+        )
+    except GRPCError as grpc_err:
+        raise exception_from_grpc_error(grpc_err)
+
+
+
+def limit(self, limit: int) ‑> QueryBuilder +
+
+

Set the maximum number of results returned by this query.

+
+ +Expand source code + +
def limit(self, limit: int) -> QueryBuilder:
+    """Set the maximum number of results returned by this query."""
+    if limit is None or not isinstance(limit, int) or limit < 0:
+        raise ValueError("limit must be a positive integer or 0 for unlimited.")
+    self._limit = limit
+    return self
+
+
+
+def page_from(self, token) ‑> QueryBuilder +
+
+

Set the paging token for the query.

+

Used when requesting subsequent pages from a query.

+
+ +Expand source code + +
def page_from(self, token) -> QueryBuilder:
+    """
+    Set the paging token for the query.
+
+    Used when requesting subsequent pages from a query.
+    """
+    self._paging_token = token
+    return self
+
+
+
+async def stream(self) ‑> AsyncIterator[Document] +
+
+

Return all query results as a stream.

+
+ +Expand source code + +
async def stream(self) -> AsyncIterator[Document]:
+    """Return all query results as a stream."""
+    # TODO: add limit, expressions and paging token to query.
+    if self._paging_token is not None:
+        raise ValueError("page_from() should not be used with streamed queries.")
+
+    try:
+        async for result in self._documents._stub.query_stream(
+            collection=_collection_to_wire(self._collection),
+            expressions=self._expressions_to_wire(),
+            limit=self._limit,
+        ):
+            yield _document_from_wire(documents=self._documents, message=result.document)
+    except GRPCError as grpc_err:
+        raise exception_from_grpc_error(grpc_err)
+
+
+
+def where(self, *expressions: Union[Expression, List[Expression], Union[str, Operator, int, bool, Tuple[str, Union[str, Operator], Any]]]) ‑> QueryBuilder +
+
+

Add a filter expression to the query.

+

:param expressions: a single expression or a set of expression args or a variadic/tuple/list of expressions.

+

Examples

+
.where('age', '>', 20)
+.where(condition('age') > 20)
+.where(condition('age').gt(20))
+.where(
+    condition('age') > 20,
+    condition('age') < 50,
+)
+.where(
+    [
+        condition('age') > 20,
+        condition('age') < 50,
+    ]
+)
+.where(
+    ('age', '>', 20),
+    ('age', '<', 50),
+)
+
+
+ +Expand source code + +
def where(
+    self,
+    *expressions: Union[
+        Expression, List[Expression], Union[str, Operator, int, bool, Tuple[str, Union[str, Operator], Any]]
+    ],
+) -> QueryBuilder:
+    """
+    Add a filter expression to the query.
+
+    :param expressions: a single expression or a set of expression args or a variadic/tuple/list of expressions.
+
+    Examples
+    --------
+        .where('age', '>', 20)
+        .where(condition('age') > 20)
+        .where(condition('age').gt(20))
+        .where(
+            condition('age') > 20,
+            condition('age') < 50,
+        )
+        .where(
+            [
+                condition('age') > 20,
+                condition('age') < 50,
+            ]
+        )
+        .where(
+            ('age', '>', 20),
+            ('age', '<', 50),
+        )
+
+    """
+    for expression in self._flat_expressions(expressions):
+        self._expressions.append(expression)
+    return self
+
+
+
+
+
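Putting the builder together, a hedged sketch of fetching pages and streaming results (collection name and filters are invented; a Nitric membrane must be reachable):

import asyncio
from nitric.api.documents import Documents, condition

async def query_customers():
    docs = Documents()
    customers = docs.collection("customers")

    # Fetch results a page at a time, 100 per page.
    query = customers.query().where(condition("age") > 20).limit(100)
    page = await query.fetch()
    while True:
        for doc in page.documents:
            print(doc.id, doc.content)
        if not page.has_more_pages():
            break
        page = await query.page_from(page.paging_token).fetch()

    # Or stream every matching document (page_from() is not allowed here).
    async for doc in customers.query().where("active", "==", True).stream():
        print(doc.id)

asyncio.run(query_customers())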
+class QueryResultsPage +(paging_token: any = <factory>, documents: List[Document] = <factory>) +
+
+

Represents a page of results from a query.

+
+ +Expand source code + +
class QueryResultsPage:
+    """Represents a page of results from a query."""
+
+    paging_token: any = field(default_factory=lambda: None)
+    documents: List[Document] = field(default_factory=lambda: [])
+
+    def has_more_pages(self) -> bool:
+        """Return false if the page token is None or empty (both represent no more pages)."""
+        return bool(self.paging_token)
+
+

Class variables

+
+
var documents : List[Document]
+
+
+
+
var paging_token
+
+
+
+
+

Methods

+
+
+def has_more_pages(self) ‑> bool +
+
+

Return false if the page token is None or empty (both represent no more pages).

+
+ +Expand source code + +
def has_more_pages(self) -> bool:
+    """Return false if the page token is None or empty (both represent no more pages)."""
+    return bool(self.paging_token)
+
+
+
+
+
+
+
+ +
+ + + \ No newline at end of file diff --git a/docs/nitric/api/event.html b/docs/nitric/api/event.html deleted file mode 100644 index 9a64413..0000000 --- a/docs/nitric/api/event.html +++ /dev/null @@ -1,355 +0,0 @@ - - - - - - -nitric.api.event API documentation - - - - - - - - - - - -
-
-
-

Module nitric.api.event

-
-
-
- -Expand source code - -
#
-# Copyright (c) 2021 Nitric Technologies Pty Ltd.
-#
-# This file is part of Nitric Python 3 SDK.
-# See https://github.com/nitrictech/python-sdk for further info.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from typing import List
-
-from nitric.proto import event as event_model, event
-from nitric.proto import event_service
-from nitric.proto.event.v1.event_pb2 import NitricEvent
-from nitric.api._base_client import BaseClient
-from google.protobuf.struct_pb2 import Struct
-
-from nitric.api.models import Topic
-
-
-class EventClient(BaseClient):
-    """
-    Nitric generic publish/subscribe event client.
-
-    This client insulates application code from stack specific event operations or SDKs.
-    """
-
-    def __init__(self):
-        """Construct a Nitric Event Client."""
-        super(self.__class__, self).__init__()
-        self._stub = event_service.EventStub(self._channel)
-
-    def publish(
-        self,
-        topic_name: str,
-        payload: dict = None,
-        payload_type: str = "",
-        event_id: str = None,
-    ) -> str:
-        """
-        Publish an event/message to a topic, which can be subscribed to by other services.
-
-        :param topic_name: the name of the topic to publish to
-        :param payload: content of the message to send
-        :param payload_type: fully qualified name of the event payload type, e.g. io.nitric.example.customer.created
-        :param event_id: a unique id, used to ensure idempotent processing of events. Defaults to a version 4 UUID.
-        :return: the request id on successful publish
-        """
-        if payload is None:
-            payload = {}
-        payload_struct = Struct()
-        payload_struct.update(payload)
-        nitric_event = NitricEvent(id=event_id, payload_type=payload_type, payload=payload_struct)
-        request = event_model.EventPublishRequest(topic=topic_name, event=nitric_event)
-        self._exec("Publish", request)
-        return event_id
-
-
-class TopicClient(BaseClient):
-    """
-    Nitric generic event topic client.
-
-    This client insulates application code from stack specific topic operations or SDKs.
-    """
-
-    def __init__(self):
-        """Construct a Nitric Topic Client."""
-        super(self.__class__, self).__init__()
-        self._stub = event_service.TopicStub(self._channel)
-
-    def get_topics(self) -> List[Topic]:
-        """Get a list of topics available for publishing or subscription."""
-        response: event.TopicListResponse = self._exec("List")
-        topics = [Topic(name=topic.name) for topic in response.topics]
-        return topics
-
-
-
-
-
-
-
-
-
-

Classes

-
-
-class EventClient -
-
-

Nitric generic publish/subscribe event client.

-

This client insulates application code from stack specific event operations or SDKs.

-

Construct a Nitric Event Client.

-
- -Expand source code - -
class EventClient(BaseClient):
-    """
-    Nitric generic publish/subscribe event client.
-
-    This client insulates application code from stack specific event operations or SDKs.
-    """
-
-    def __init__(self):
-        """Construct a Nitric Event Client."""
-        super(self.__class__, self).__init__()
-        self._stub = event_service.EventStub(self._channel)
-
-    def publish(
-        self,
-        topic_name: str,
-        payload: dict = None,
-        payload_type: str = "",
-        event_id: str = None,
-    ) -> str:
-        """
-        Publish an event/message to a topic, which can be subscribed to by other services.
-
-        :param topic_name: the name of the topic to publish to
-        :param payload: content of the message to send
-        :param payload_type: fully qualified name of the event payload type, e.g. io.nitric.example.customer.created
-        :param event_id: a unique id, used to ensure idempotent processing of events. Defaults to a version 4 UUID.
-        :return: the request id on successful publish
-        """
-        if payload is None:
-            payload = {}
-        payload_struct = Struct()
-        payload_struct.update(payload)
-        nitric_event = NitricEvent(id=event_id, payload_type=payload_type, payload=payload_struct)
-        request = event_model.EventPublishRequest(topic=topic_name, event=nitric_event)
-        self._exec("Publish", request)
-        return event_id
-
-

Ancestors

-
    -
  • nitric.api._base_client.BaseClient
  • -
  • abc.ABC
  • -
-

Methods

-
-
-def publish(self, topic_name: str, payload: dict = None, payload_type: str = '', event_id: str = None) ‑> str -
-
-

Publish an event/message to a topic, which can be subscribed to by other services.

-

:param topic_name: the name of the topic to publish to -:param payload: content of the message to send -:param payload_type: fully qualified name of the event payload type, e.g. io.nitric.example.customer.created -:param event_id: a unique id, used to ensure idempotent processing of events. Defaults to a version 4 UUID. -:return: the request id on successful publish

-
- -Expand source code - -
def publish(
-    self,
-    topic_name: str,
-    payload: dict = None,
-    payload_type: str = "",
-    event_id: str = None,
-) -> str:
-    """
-    Publish an event/message to a topic, which can be subscribed to by other services.
-
-    :param topic_name: the name of the topic to publish to
-    :param payload: content of the message to send
-    :param payload_type: fully qualified name of the event payload type, e.g. io.nitric.example.customer.created
-    :param event_id: a unique id, used to ensure idempotent processing of events. Defaults to a version 4 UUID.
-    :return: the request id on successful publish
-    """
-    if payload is None:
-        payload = {}
-    payload_struct = Struct()
-    payload_struct.update(payload)
-    nitric_event = NitricEvent(id=event_id, payload_type=payload_type, payload=payload_struct)
-    request = event_model.EventPublishRequest(topic=topic_name, event=nitric_event)
-    self._exec("Publish", request)
-    return event_id
-
-
-
-
-
-class NitricEvent -(*args, **kwargs) -
-
-

A ProtocolMessage

-

Ancestors

-
    -
  • google.protobuf.pyext._message.CMessage
  • -
  • google.protobuf.message.Message
  • -
-

Class variables

-
-
var DESCRIPTOR
-
-
-
-
-

Instance variables

-
-
var id
-
-

Field nitric.event.v1.NitricEvent.id

-
-
var payload
-
-

Field nitric.event.v1.NitricEvent.payload

-
-
var payload_type
-
-

Field nitric.event.v1.NitricEvent.payload_type

-
-
-
-
-class TopicClient -
-
-

Nitric generic event topic client.

-

This client insulates application code from stack specific topic operations or SDKs.

-

Construct a Nitric Topic Client.

-
- -Expand source code - -
class TopicClient(BaseClient):
-    """
-    Nitric generic event topic client.
-
-    This client insulates application code from stack specific topic operations or SDKs.
-    """
-
-    def __init__(self):
-        """Construct a Nitric Topic Client."""
-        super(self.__class__, self).__init__()
-        self._stub = event_service.TopicStub(self._channel)
-
-    def get_topics(self) -> List[Topic]:
-        """Get a list of topics available for publishing or subscription."""
-        response: event.TopicListResponse = self._exec("List")
-        topics = [Topic(name=topic.name) for topic in response.topics]
-        return topics
-
-

Ancestors

-
    -
  • nitric.api._base_client.BaseClient
  • -
  • abc.ABC
  • -
-

Methods

-
-
-def get_topics(self) ‑> List[Topic] -
-
-

Get a list of topics available for publishing or subscription.

-
- -Expand source code - -
def get_topics(self) -> List[Topic]:
-    """Get a list of topics available for publishing or subscription."""
-    response: event.TopicListResponse = self._exec("List")
-    topics = [Topic(name=topic.name) for topic in response.topics]
-    return topics
-
-
-
-
-
-
-
- -
- - - \ No newline at end of file diff --git a/docs/nitric/api/events.html b/docs/nitric/api/events.html index b1af7bd..ea153c2 100644 --- a/docs/nitric/api/events.html +++ b/docs/nitric/api/events.html @@ -44,15 +44,21 @@

Module nitric.api.events

# See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import List, Union + +from grpclib import GRPCError + +from nitric.api.exception import exception_from_grpc_error from nitric.utils import new_default_channel, _struct_from_dict -from nitric.proto.nitric.event.v1 import EventStub, NitricEvent, TopicStub +from nitricapi.nitric.event.v1 import EventServiceStub, NitricEvent, TopicServiceStub from dataclasses import dataclass, field @dataclass(frozen=True, order=True) class Event(object): - """Represents a NitricEvent.""" + """Eventing client, providing access to Topic and Event references and operations on those entities.""" payload: dict = field(default_factory=dict) id: str = field(default=None) @@ -71,7 +77,7 @@

Module nitric.api.events

class Topic(object): """A reference to a topic on an event service, used to perform operations on that topic.""" - _stub: EventStub + _events: Events name: str async def publish( @@ -91,11 +97,14 @@

Module nitric.api.events

# TODO: handle events that are just a payload event = Event(**event) - response = await self._stub.publish(topic=self.name, event=_event_to_wire(event)) - return Event(**{**event.__dict__.copy(), **{"id": response.id}}) + try: + response = await self._events._stub.publish(topic=self.name, event=_event_to_wire(event)) + return Event(**{**event.__dict__.copy(), **{"id": response.id}}) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) -class EventClient(object): +class Events(object): """ Nitric generic publish/subscribe event client. @@ -104,18 +113,26 @@

Module nitric.api.events

def __init__(self): """Construct a Nitric Event Client.""" - channel = new_default_channel() - self._stub = EventStub(channel=channel) - self._topic_stub = TopicStub(channel=channel) + self.channel = new_default_channel() + self._stub = EventServiceStub(channel=self.channel) + self._topic_stub = TopicServiceStub(channel=self.channel) + + def __del__(self): + # close the channel when this client is destroyed + if self.channel is not None: + self.channel.close() async def topics(self) -> List[Topic]: """Get a list of topics available for publishing or subscription.""" - response = await self._topic_stub.list() - return [self.topic(topic.name) for topic in response.topics] + try: + response = await self._topic_stub.list() + return [self.topic(topic.name) for topic in response.topics] + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) def topic(self, name: str) -> Topic: - """Return a reference a topic from the connected event service.""" - return Topic(_stub=self._stub, name=name) + """Return a reference to a topic.""" + return Topic(_events=self, name=name)
@@ -132,13 +149,13 @@

Classes

(payload: dict = <factory>, id: str = None, payload_type: str = None)
-

Represents a NitricEvent.

+

An event that can be published to a Topic.

Expand source code
class Event(object):
-    """Represents a NitricEvent."""
+    """Eventing client, providing access to Topic and Event references and operations on those entities."""
 
     payload: dict = field(default_factory=dict)
     id: str = field(default=None)
@@ -160,8 +177,8 @@ 

Class variables

-
-class EventClient +
+class Events

Nitric generic publish/subscribe event client.

@@ -171,11 +188,7 @@

Class variables

Expand source code -<<<<<<< refs/remotes/origin/main:docs/nitric/api/models.html -
class FailedTask(Task):
-    """Represents a failed queue publish for an event."""
-=======
-
class EventClient(object):
+
class Events(object):
     """
     Nitric generic publish/subscribe event client.
 
@@ -184,37 +197,44 @@ 

Class variables

def __init__(self): """Construct a Nitric Event Client.""" - channel = new_default_channel() - self._stub = EventStub(channel=channel) - self._topic_stub = TopicStub(channel=channel) ->>>>>>> feat: port faas.start to bi-di streaming with membrane:docs/nitric/api/events.html + self.channel = new_default_channel() + self._stub = EventServiceStub(channel=self.channel) + self._topic_stub = TopicServiceStub(channel=self.channel) + + def __del__(self): + # close the channel when this client is destroyed + if self.channel is not None: + self.channel.close() async def topics(self) -> List[Topic]: """Get a list of topics available for publishing or subscription.""" - response = await self._topic_stub.list() - return [self.topic(topic.name) for topic in response.topics] + try: + response = await self._topic_stub.list() + return [self.topic(topic.name) for topic in response.topics] + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) def topic(self, name: str) -> Topic: - """Return a reference a topic from the connected event service.""" - return Topic(_stub=self._stub, name=name)
+ """Return a reference to a topic.""" + return Topic(_events=self, name=name)

Methods

-
+
def topic(self, name: str) ‑> Topic
-

Return a reference a topic from the connected event service.

+

Return a reference to a topic.

Expand source code
def topic(self, name: str) -> Topic:
-    """Return a reference a topic from the connected event service."""
-    return Topic(_stub=self._stub, name=name)
+ """Return a reference to a topic.""" + return Topic(_events=self, name=name)
-
+
async def topics(self) ‑> List[Topic]
@@ -225,15 +245,18 @@

Methods

async def topics(self) -> List[Topic]:
     """Get a list of topics available for publishing or subscription."""
-    response = await self._topic_stub.list()
-    return [self.topic(topic.name) for topic in response.topics]
+ try: + response = await self._topic_stub.list() + return [self.topic(topic.name) for topic in response.topics] + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)
class Topic -(_stub: EventStub, name: str) +(_events: Events, name: str)

A reference to a topic on an event service, used to perform operations on that topic.

@@ -241,14 +264,10 @@

Methods

Expand source code -<<<<<<< refs/remotes/origin/main:docs/nitric/api/models.html -
class Task(object):
-    """Represents a NitricTask."""
-=======
 
class Topic(object):
     """A reference to a topic on an event service, used to perform operations on that topic."""
 
-    _stub: EventStub
+    _events: Events
     name: str
 
     async def publish(
@@ -267,10 +286,12 @@ 

Methods

if isinstance(event, dict): # TODO: handle events that are just a payload event = Event(**event) ->>>>>>> feat: port faas.start to bi-di streaming with membrane:docs/nitric/api/events.html - response = await self._stub.publish(topic=self.name, event=_event_to_wire(event)) - return Event(**{**event.__dict__.copy(), **{"id": response.id}})
+ try: + response = await self._events._stub.publish(topic=self.name, event=_event_to_wire(event)) + return Event(**{**event.__dict__.copy(), **{"id": response.id}}) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)

Class variables

@@ -292,10 +313,6 @@

Methods

Expand source code -<<<<<<< refs/remotes/origin/main:docs/nitric/api/models.html -
class Topic(object):
-    """Represents event topic metadata."""
-=======
 
async def publish(
     self,
     event: Union[Event, dict] = None,
@@ -312,10 +329,12 @@ 

Methods

if isinstance(event, dict): # TODO: handle events that are just a payload event = Event(**event) ->>>>>>> feat: port faas.start to bi-di streaming with membrane:docs/nitric/api/events.html - response = await self._stub.publish(topic=self.name, event=_event_to_wire(event)) - return Event(**{**event.__dict__.copy(), **{"id": response.id}})
+ try: + response = await self._events._stub.publish(topic=self.name, event=_event_to_wire(event)) + return Event(**{**event.__dict__.copy(), **{"id": response.id}}) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)
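A hedged publishing sketch using the classes documented above; the topic name and payload are illustrative.

from nitric.api import Events, Event

async def publish_example():
    topic = Events().topic("updates")  # "updates" is an illustrative topic name
    event = await topic.publish(Event(payload={"message": "hello"}))
    print(event.id)  # id assigned by the event service on publish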
@@ -345,10 +364,10 @@

Eve
  • -

    EventClient

    +

    Events

  • diff --git a/docs/nitric/api/exception.html b/docs/nitric/api/exception.html index fc2daed..9dfa6ec 100644 --- a/docs/nitric/api/exception.html +++ b/docs/nitric/api/exception.html @@ -44,24 +44,169 @@

    Module nitric.api.exception

    # See the License for the specific language governing permissions and # limitations under the License. # +from grpclib import GRPCError -class UnimplementedException(Exception): - """Exception raised when the requested operation isn't supported by the server.""" +class NitricServiceException(Exception): + """Base exception for all errors returned by Nitric API methods.""" pass -class AlreadyExistsException(Exception): - """Exception raised when an entity already exist during a request to create a new entity.""" +class AbortedException(NitricServiceException): + """The operation was aborted, typically due to a concurrency issue such as a transaction abort.""" pass -class UnavailableException(Exception): - """Exception raised when a gRPC service is unavailable.""" +class AlreadyExistsException(NitricServiceException): + """The entity that a client attempted to create (e.g., file or directory) already exists.""" - pass
    + pass + + +class CancelledException(NitricServiceException): + """The operation was cancelled, typically by the caller.""" + + pass + + +class DataLossException(NitricServiceException): + """Unrecoverable data loss or corruption.""" + + pass + + +class DeadlineExceededException(NitricServiceException): + """The deadline expired before the operation could complete.""" + + pass + + +class FailedPreconditionException(NitricServiceException): + """ + The operation was rejected because the system is not in a state required for the operation's execution. + + For example, the document collection to be deleted is not empty. + """ + + pass + + +class InternalException(NitricServiceException): + """Internal errors.""" + + pass + + +class InvalidArgumentException(NitricServiceException): + """ + The client specified an invalid argument. + + Note that this differs from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments that are problematic + regardless of the state of the system (e.g., a malformed file name). + """ + + pass + + +class OutOfRangeException(NitricServiceException): + """ + The operation was attempted past the valid range. + + E.g. reading past the end of a file. + """ + + pass + + +class NotFoundException(NitricServiceException): + """Some requested entity was not found.""" + + pass + + +class PermissionDeniedException(NitricServiceException): + """The caller does not have permission to execute the specified operation.""" + + pass + + +class ResourceExhaustedException(NitricServiceException): + """Some resource has been exhausted, perhaps a per-user quota, or perhaps the entire file system is out of space.""" + + pass + + +class UnauthenticatedException(NitricServiceException): + """The request does not have valid authentication credentials for the operation.""" + + pass + + +class UnavailableException(NitricServiceException): + """ + The service is currently unavailable. + + This is most likely a transient condition, which can be corrected by retrying with a backoff. + """ + + pass + + +class UnimplementedException(NitricServiceException): + """ + The operation is not implemented or is not supported/enabled in this service. + + May appear when using an older version of the Membrane with a newer SDK. + """ + + pass + + +class UnknownException(NitricServiceException): + """Unknown error.""" + + pass + + +def exception_from_grpc_error(error: GRPCError): + """Translate a gRPC error to a nitric api exception.""" + return exception_from_grpc_code(error.status.value, error.message) + + +def exception_from_grpc_code(code: int, message: str = None): + """ + Return a new instance of the appropriate exception for the given status code. + + If an unknown or unexpected status code value is provided an UnknownException will be returned. + """ + if code not in _exception_code_map: + return UnknownException() + + return _exception_code_map[code](message) + + +# Map of gRPC status codes to the appropriate exception class. +_exception_code_map = { + 0: lambda message: Exception("Error returned with status 0, which is a success status"), + 1: CancelledException, + 2: UnknownException, + 3: InvalidArgumentException, + 4: DeadlineExceededException, + 5: NotFoundException, + 6: AlreadyExistsException, + 7: PermissionDeniedException, + 8: ResourceExhaustedException, + 9: FailedPreconditionException, + 10: AbortedException, + 11: OutOfRangeException, + 12: UnimplementedException, + 13: InternalException, + 14: UnavailableException, + 15: DataLossException, + 16: UnauthenticatedException, +}
  • @@ -69,27 +214,388 @@

    Module nitric.api.exception

    +

    Functions

    +
    +
    +def exception_from_grpc_code(code: int, message: str = None) +
    +
    +

    Return a new instance of the appropriate exception for the given status code.

    +

    If an unknown or unexpected status code value is provided an UnknownException will be returned.

    +
    + +Expand source code + +
    def exception_from_grpc_code(code: int, message: str = None):
    +    """
    +    Return a new instance of the appropriate exception for the given status code.
    +
    +    If an unknown or unexpected status code value is provided an UnknownException will be returned.
    +    """
    +    if code not in _exception_code_map:
    +        return UnknownException()
    +
    +    return _exception_code_map[code](message)
    +
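An illustrative check of the mapping, based on the status-code table defined above (code 5 maps to NotFoundException; unrecognised codes fall back to UnknownException).

# Illustrative only: code 5 (NOT_FOUND) maps to NotFoundException per the table above.
err = exception_from_grpc_code(5, "document not found")
assert isinstance(err, NotFoundException)
assert isinstance(exception_from_grpc_code(999), UnknownException)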
    +
    +
    +def exception_from_grpc_error(error: grpclib.exceptions.GRPCError) +
    +
    +

    Translate a gRPC error to a nitric api exception.

    +
    + +Expand source code + +
    def exception_from_grpc_error(error: GRPCError):
    +    """Translate a gRPC error to a nitric api exception."""
    +    return exception_from_grpc_code(error.status.value, error.message)
    +
    +
    +

    Classes

    +
    +class AbortedException +(*args, **kwargs) +
    +
    +

    The operation was aborted, typically due to a concurrency issue such as a transaction abort.

    +
    + +Expand source code + +
    class AbortedException(NitricServiceException):
    +    """The operation was aborted, typically due to a concurrency issue such as a transaction abort."""
    +
    +    pass
    +
    +

    Ancestors

    + +
    class AlreadyExistsException (*args, **kwargs)
    -

    Exception raised when an entity already exist during a request to create a new entity.

    +

    The entity that a client attempted to create (e.g., file or directory) already exists.

    +
    + +Expand source code + +
    class AlreadyExistsException(NitricServiceException):
    +    """The entity that a client attempted to create (e.g., file or directory) already exists."""
    +
    +    pass
    +
    +

    Ancestors

    + +
    +
    +class CancelledException +(*args, **kwargs) +
    +
    +

    The operation was cancelled, typically by the caller.

    +
    + +Expand source code + +
    class CancelledException(NitricServiceException):
    +    """The operation was cancelled, typically by the caller."""
    +
    +    pass
    +
    +

    Ancestors

    + +
    +
    +class DataLossException +(*args, **kwargs) +
    +
    +

    Unrecoverable data loss or corruption.

    +
    + +Expand source code + +
    class DataLossException(NitricServiceException):
    +    """Unrecoverable data loss or corruption."""
    +
    +    pass
    +
    +

    Ancestors

    + +
    +
    +class DeadlineExceededException +(*args, **kwargs) +
    +
    +

    The deadline expired before the operation could complete.

    +
    + +Expand source code + +
    class DeadlineExceededException(NitricServiceException):
    +    """The deadline expired before the operation could complete."""
    +
    +    pass
    +
    +

    Ancestors

    + +
    +
    +class FailedPreconditionException +(*args, **kwargs) +
    +
    +

    The operation was rejected because the system is not in a state required for the operation's execution.

    +

    For example, the document collection to be deleted is not empty.

    +
    + +Expand source code + +
    class FailedPreconditionException(NitricServiceException):
    +    """
    +    The operation was rejected because the system is not in a state required for the operation's execution.
    +
    +    For example, the document collection to be deleted is not empty.
    +    """
    +
    +    pass
    +
    +

    Ancestors

    + +
    +
    +class InternalException +(*args, **kwargs) +
    +
    +

    Internal errors.

    +
    + +Expand source code + +
    class InternalException(NitricServiceException):
    +    """Internal errors."""
    +
    +    pass
    +
    +

    Ancestors

    + +
    +
    +class InvalidArgumentException +(*args, **kwargs) +
    +
    +

    The client specified an invalid argument.

    +

    Note that this differs from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments that are problematic +regardless of the state of the system (e.g., a malformed file name).

    +
    + +Expand source code + +
    class InvalidArgumentException(NitricServiceException):
    +    """
    +    The client specified an invalid argument.
    +
    +    Note that this differs from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments that are problematic
    +    regardless of the state of the system (e.g., a malformed file name).
    +    """
    +
    +    pass
    +
    +

    Ancestors

    + +
    +
    +class NitricServiceException +(*args, **kwargs) +
    +
    +

    Base exception for all errors returned by Nitric API methods.

    +
    + +Expand source code + +
    class NitricServiceException(Exception):
    +    """Base exception for all errors returned by Nitric API methods."""
    +
    +    pass
    +
    +

    Ancestors

    +
      +
    • builtins.Exception
    • +
    • builtins.BaseException
    • +
    +

    Subclasses

    + +
    +
    +class NotFoundException +(*args, **kwargs) +
    +
    +

    Some requested entity was not found.

    +
    + +Expand source code + +
    class NotFoundException(NitricServiceException):
    +    """Some requested entity was not found."""
    +
    +    pass
    +
    +

    Ancestors

    + +
    +
    +class OutOfRangeException +(*args, **kwargs) +
    +
    +

    The operation was attempted past the valid range.

    +

    E.g. reading past the end of a file.

    +
    + +Expand source code + +
    class OutOfRangeException(NitricServiceException):
    +    """
    +    The operation was attempted past the valid range.
    +
    +    E.g. reading past the end of a file.
    +    """
    +
    +    pass
    +
    +

    Ancestors

    + +
    +
    +class PermissionDeniedException +(*args, **kwargs) +
    +
    +

    The caller does not have permission to execute the specified operation.

    +
    + +Expand source code + +
    class PermissionDeniedException(NitricServiceException):
    +    """The caller does not have permission to execute the specified operation."""
    +
    +    pass
    +
    +

    Ancestors

    + +
    +
    +class ResourceExhaustedException +(*args, **kwargs) +
    +
    +

    Some resource has been exhausted, perhaps a per-user quota, or perhaps the entire file system is out of space.

    +
    + +Expand source code + +
    class ResourceExhaustedException(NitricServiceException):
    +    """Some resource has been exhausted, perhaps a per-user quota, or perhaps the entire file system is out of space."""
    +
    +    pass
    +
    +

    Ancestors

    + +
    +
    +class UnauthenticatedException +(*args, **kwargs) +
    +
    +

    The request does not have valid authentication credentials for the operation.

    Expand source code -
    class AlreadyExistsException(Exception):
    -    """Exception raised when an entity already exist during a request to create a new entity."""
    +
    class UnauthenticatedException(NitricServiceException):
    +    """The request does not have valid authentication credentials for the operation."""
     
         pass

    Ancestors

    @@ -99,18 +605,24 @@

    Ancestors

    (*args, **kwargs)
    -

    Exception raised when a gRPC service is unavailable.

    +

    The service is currently unavailable.

    +

    This is most likely a transient condition, which can be corrected by retrying with a backoff.

    Expand source code -
    class UnavailableException(Exception):
    -    """Exception raised when a gRPC service is unavailable."""
    +
    class UnavailableException(NitricServiceException):
    +    """
    +    The service is currently unavailable.
    +
    +    This is most likely a transient condition, which can be corrected by retrying with a backoff.
    +    """
     
         pass

    Ancestors

    @@ -120,18 +632,46 @@

    Ancestors

    (*args, **kwargs)
    -

    Exception raised when the requested operation isn't supported by the server.

    +

    The operation is not implemented or is not supported/enabled in this service.

    +

    May appear when using an older version of the Membrane with a newer SDK.

    +
    + +Expand source code + +
    class UnimplementedException(NitricServiceException):
    +    """
    +    The operation is not implemented or is not supported/enabled in this service.
    +
    +    May appear when using an older version of the Membrane with a newer SDK.
    +    """
    +
    +    pass
    +
    +

    Ancestors

    + +
    +
    +class UnknownException +(*args, **kwargs) +
    +
    +

    Unknown error.

    Expand source code -
    class UnimplementedException(Exception):
    -    """Exception raised when the requested operation isn't supported by the server."""
    +
    class UnknownException(NitricServiceException):
    +    """Unknown error."""
     
         pass

    Ancestors

    @@ -150,17 +690,65 @@

    Index

  • nitric.api
  • +
  • Functions

    + +
  • Classes

  • diff --git a/docs/nitric/api/index.html b/docs/nitric/api/index.html index 4000717..5922a66 100644 --- a/docs/nitric/api/index.html +++ b/docs/nitric/api/index.html @@ -46,26 +46,36 @@

    Module nitric.api

    # limitations under the License. # """Nitric API SDK.""" -from nitric.api.events import EventClient, Event, Topic -from nitric.api.kv import KeyValueClient -from nitric.api.queues import QueueClient, Task, FailedTask -from nitric.api.storage import StorageClient +from nitric.api.events import Events, Event, Topic +from nitric.api.queues import Queues, Task, FailedTask +from nitric.api.storage import Storage +from nitric.api.documents import Documents +from nitric.api.secrets import Secrets __all__ = [ - "EventClient", - "KeyValueClient", - "QueueClient", - "StorageClient", + "Events", + "Queues", + "Documents", + "Storage", "Event", "Task", "FailedTask", "Topic", + "Secrets", ]

    Sub-modules

    +
    nitric.api.const
    +
    +
    +
    +
    nitric.api.documents
    +
    +
    +
    nitric.api.events
    @@ -74,11 +84,11 @@

    Sub-modules

    -
    nitric.api.kv
    +
    nitric.api.queues
    -
    nitric.api.queues
    +
    nitric.api.secrets
    @@ -95,18 +105,70 @@

    Sub-modules

    Classes

    +
    +class Documents +
    +
    +

    Nitric client for interacting with document collections.

    +

This client insulates application code from stack specific document operations or SDKs.

    +

    Construct a Nitric Document Client.

    +
    + +Expand source code + +
    class Documents(object):
    +    """
    +    Nitric client for interacting with document collections.
    +
+    This client insulates application code from stack specific document operations or SDKs.
    +    """
    +
    +    _stub: DocumentServiceStub
    +
    +    def __init__(self):
    +        """Construct a Nitric Document Client."""
    +        self._channel = new_default_channel()
    +        self._stub = DocumentServiceStub(channel=self._channel)
    +
    +    def __del__(self):
    +        # close the channel when this client is destroyed
    +        if self._channel is not None:
    +            self._channel.close()
    +
    +    def collection(self, name: str) -> CollectionRef:
    +        """Return a reference to a document collection."""
    +        return CollectionRef(_documents=self, name=name)
    +
    +

    Methods

    +
    +
    +def collection(self, name: str) ‑> CollectionRef +
    +
    +

    Return a reference to a document collection.

    +
    + +Expand source code + +
    def collection(self, name: str) -> CollectionRef:
    +    """Return a reference to a document collection."""
    +    return CollectionRef(_documents=self, name=name)
    +
    +
    +
    +
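A minimal sketch of obtaining a collection reference; the collection name is illustrative.

from nitric.api import Documents

docs = Documents()
users = docs.collection("users")  # returns a CollectionRef for further document operations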
    class Event (payload: dict = <factory>, id: str = None, payload_type: str = None)
    -

    Represents a NitricEvent.

    +

An event that can be published to a Topic.

    Expand source code
    class Event(object):
    -    """Represents a NitricEvent."""
    +    """Eventing client, providing access to Topic and Event references and operations on those entities."""
     
         payload: dict = field(default_factory=dict)
         id: str = field(default=None)
    @@ -128,8 +190,8 @@ 

    Class variables

    -
    -class EventClient +
    +class Events

    Nitric generic publish/subscribe event client.

    @@ -139,7 +201,7 @@

    Class variables

    Expand source code -
    class EventClient(object):
    +
    class Events(object):
         """
         Nitric generic publish/subscribe event client.
     
    @@ -148,36 +210,44 @@ 

    Class variables

    def __init__(self): """Construct a Nitric Event Client.""" - channel = new_default_channel() - self._stub = EventStub(channel=channel) - self._topic_stub = TopicStub(channel=channel) + self.channel = new_default_channel() + self._stub = EventServiceStub(channel=self.channel) + self._topic_stub = TopicServiceStub(channel=self.channel) + + def __del__(self): + # close the channel when this client is destroyed + if self.channel is not None: + self.channel.close() async def topics(self) -> List[Topic]: """Get a list of topics available for publishing or subscription.""" - response = await self._topic_stub.list() - return [self.topic(topic.name) for topic in response.topics] + try: + response = await self._topic_stub.list() + return [self.topic(topic.name) for topic in response.topics] + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) def topic(self, name: str) -> Topic: - """Return a reference a topic from the connected event service.""" - return Topic(_stub=self._stub, name=name)
    + """Return a reference to a topic.""" + return Topic(_events=self, name=name)

    Methods

    -
    +
    def topic(self, name: str) ‑> Topic
    -

    Return a reference a topic from the connected event service.

    +

    Return a reference to a topic.

    Expand source code
    def topic(self, name: str) -> Topic:
    -    """Return a reference a topic from the connected event service."""
    -    return Topic(_stub=self._stub, name=name)
    + """Return a reference to a topic.""" + return Topic(_events=self, name=name)
    -
    +
    async def topics(self) ‑> List[Topic]
    @@ -188,26 +258,28 @@

    Methods

    async def topics(self) -> List[Topic]:
         """Get a list of topics available for publishing or subscription."""
    -    response = await self._topic_stub.list()
    -    return [self.topic(topic.name) for topic in response.topics]
    + try: + response = await self._topic_stub.list() + return [self.topic(topic.name) for topic in response.topics] + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)
    class FailedTask -(id: str = None, payload_type: str = None, payload: dict = <factory>, lease_id: str = None, message: str = '') +(id: str = None, payload_type: str = None, payload: dict = <factory>, message: str = '')
    -

    Represents a failed queue publish for an event.

    +

    Represents a failed queue publish.

    Expand source code
    class FailedTask(Task):
    -    """Represents a failed queue publish for an event."""
    +    """Represents a failed queue publish."""
     
    -    lease_id: str = None  # failed tasks should never have a lease id.
         message: str = field(default="")

    Ancestors

    @@ -216,159 +288,109 @@

    Ancestors

    Class variables

    -
    var lease_id : str
    -
    -
    -
    var message : str
    -

    Inherited members

    -
    -
    -class KeyValueClient -(collection: str) +
    +class Queues
    -

    Nitric generic document store/db client.

    -

    This client insulates application code from stack specific document CRUD operations or SDKs.

    -

    Construct a new DocumentClient.

    -

    :param collection: name of the key/value collection

    +

    Queueing client, providing access to Queue and Task references and operations on those entities.

    +

    Construct a Nitric Queue Client.

    Expand source code -
    class KeyValueClient(object):
    -    """
    -    Nitric generic document store/db client.
    -
    -    This client insulates application code from stack specific document CRUD operations or SDKs.
    -    """
    +
    class Queues(object):
    +    """Queueing client, providing access to Queue and Task references and operations on those entities."""
     
    -    def __init__(self, collection: str):
    -        """
    -        Construct a new DocumentClient.
    -
    -        :param collection: name of the key/value collection
    -        """
    -        self.collection = collection
    -        self._stub = KeyValueStub(channel=new_default_channel())
    -
    -    async def put(self, key: str, value: dict):
    -        """Create a new document with the specified key."""
    -        await self._stub.put(collection=self.collection, key=key, value=_struct_from_dict(value))
    +    def __init__(self):
    +        """Construct a Nitric Queue Client."""
    +        self.channel = new_default_channel()
    +        self._queue_stub = QueueServiceStub(channel=self.channel)
     
    -    async def get(self, key: str) -> dict:
    -        """Retrieve a document from the specified key."""
    -        response = await self._stub.get(collection=self.collection, key=key)
    -        return response.value.to_dict()
    +    def __del__(self):
    +        # close the channel when this client is destroyed
    +        if self.channel is not None:
    +            self.channel.close()
     
    -    async def delete(self, key: str):
    -        """Delete the specified document from the collection."""
    -        await self._stub.delete(collection=self.collection, key=key)
    + def queue(self, name: str): + """Return a reference to a queue from the connected queue service.""" + return Queue(_queueing=self, name=name)

    Methods

    -
    -async def delete(self, key: str) -
    -
    -

    Delete the specified document from the collection.

    -
    - -Expand source code - -
    async def delete(self, key: str):
    -    """Delete the specified document from the collection."""
    -    await self._stub.delete(collection=self.collection, key=key)
    -
    -
    -
    -async def get(self, key: str) ‑> dict -
    -
    -

    Retrieve a document from the specified key.

    -
    - -Expand source code - -
    async def get(self, key: str) -> dict:
    -    """Retrieve a document from the specified key."""
    -    response = await self._stub.get(collection=self.collection, key=key)
    -    return response.value.to_dict()
    -
    -
    -
    -async def put(self, key: str, value: dict) +
    +def queue(self, name: str)
    -

    Create a new document with the specified key.

    +

    Return a reference to a queue from the connected queue service.

    Expand source code -
    async def put(self, key: str, value: dict):
    -    """Create a new document with the specified key."""
    -    await self._stub.put(collection=self.collection, key=key, value=_struct_from_dict(value))
    +
    def queue(self, name: str):
    +    """Return a reference to a queue from the connected queue service."""
    +    return Queue(_queueing=self, name=name)
    -
    -class QueueClient +
    +class Secrets
    -

    Nitric generic publish/subscribe tasking client.

    -

    This client insulates application code from stack specific task/topic operations or SDKs.

    -

    Construct a Nitric Queue Client.

    +

    Nitric secrets management client.

    +

This client insulates application code from stack specific secrets management services.

    +

Construct a Nitric Secrets Client.

    Expand source code -
    class QueueClient(object):
    +
    class Secrets(object):
         """
    -    Nitric generic publish/subscribe tasking client.
    +    Nitric secrets management client.
     
    -    This client insulates application code from stack specific task/topic operations or SDKs.
+    This client insulates application code from stack specific secrets management services.
         """
     
         def __init__(self):
    -        """Construct a Nitric Queue Client."""
    -        self._queue_stub = QueueStub(channel=new_default_channel())
    +        """Construct a Nitric Storage Client."""
    +        self._channel = new_default_channel()
    +        self._secrets_stub = SecretServiceStub(channel=self._channel)
     
    -    def queue(self, name: str):
    -        """Return a reference to a queue from the connected queue service."""
    -        return Queue(_queue_stub=self._queue_stub, name=name)
    + def __del__(self): + # close the channel when this client is destroyed + if self._channel is not None: + self._channel.close() + + def secret(self, name: str): + """Return a reference to a secret container from the connected secrets management service.""" + return SecretContainer(_secrets=self, name=name)

    Methods

    -
    -def queue(self, name: str) +
    +def secret(self, name: str)
    -

    Return a reference to a queue from the connected queue service.

    +

    Return a reference to a secret container from the connected secrets management service.

    Expand source code -
    def queue(self, name: str):
    -    """Return a reference to a queue from the connected queue service."""
    -    return Queue(_queue_stub=self._queue_stub, name=name)
    +
    def secret(self, name: str):
    +    """Return a reference to a secret container from the connected secrets management service."""
    +    return SecretContainer(_secrets=self, name=name)
    -
    -class StorageClient +
    +class Storage

    Nitric generic blob storage client.

    @@ -378,7 +400,7 @@

    Methods

    Expand source code -
    class StorageClient(object):
    +
    class Storage(object):
         """
         Nitric generic blob storage client.
     
    @@ -387,15 +409,21 @@ 

    Methods

    def __init__(self): """Construct a Nitric Storage Client.""" - self._storage_stub = StorageStub(channel=new_default_channel()) + self._channel = new_default_channel() + self._storage_stub = StorageServiceStub(channel=self._channel) + + def __del__(self): + # close the channel when this client is destroyed + if self._channel is not None: + self._channel.close() def bucket(self, name: str): """Return a reference to a bucket from the connected storage service.""" - return Bucket(_storage_stub=self._storage_stub, name=name)
    + return Bucket(_storage=self, name=name)

    Methods

    -
    +
    def bucket(self, name: str)
    @@ -406,41 +434,27 @@

    Methods

    def bucket(self, name: str):
         """Return a reference to a bucket from the connected storage service."""
    -    return Bucket(_storage_stub=self._storage_stub, name=name)
    + return Bucket(_storage=self, name=name)
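A minimal sketch of obtaining a bucket reference; the bucket name is illustrative.

from nitric.api import Storage

images = Storage().bucket("images")  # returns a Bucket reference from the storage service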
    class Task -(id: str = None, payload_type: str = None, payload: dict = <factory>, lease_id: str = None) +(id: str = None, payload_type: str = None, payload: dict = <factory>)
    -

    Represents a NitricTask.

    +

    A task to be sent to a Queue.

    Expand source code
    class Task(object):
    -    """Represents a NitricTask."""
    +    """A task to be sent to a Queue."""
     
         id: str = field(default=None)
         payload_type: str = field(default=None)
    -    payload: dict = field(default_factory=dict)
    -    lease_id: str = field(default=None)
    -    _queue_stub: QueueStub = field(default=None)
    -    _queue: str = field(default=None)
    -
    -    async def complete(self):
    -        """Mark this task as complete and remove it from the queue."""
    -        if self._queue_stub is None or self._queue is None or self._queue == "":
    -            raise Exception("Task was not created via Queue.")
    -        if self.lease_id is None:
    -            raise Exception(
    -                "No lease_id available for task. Tasks must be received using Queue.receive to have a "
    -                "valid lease_id."
    -            )
    -        await self._queue_stub.complete(queue=self._queue, lease_id=self.lease_id)
    + payload: dict = field(default_factory=dict)

    Subclasses

      @@ -452,10 +466,6 @@

      Class variables

      -
      var lease_id : str
      -
      -
      -
      var payload : dict
      @@ -465,34 +475,10 @@

      Class variables

    -

    Methods

    -
    -
    -async def complete(self) -
    -
    -

    Mark this task as complete and remove it from the queue.

    -
    - -Expand source code - -
    async def complete(self):
    -    """Mark this task as complete and remove it from the queue."""
    -    if self._queue_stub is None or self._queue is None or self._queue == "":
    -        raise Exception("Task was not created via Queue.")
    -    if self.lease_id is None:
    -        raise Exception(
    -            "No lease_id available for task. Tasks must be received using Queue.receive to have a "
    -            "valid lease_id."
    -        )
    -    await self._queue_stub.complete(queue=self._queue, lease_id=self.lease_id)
    -
    -
    -
    class Topic -(_stub: EventStub, name: str) +(_events: Events, name: str)

    A reference to a topic on an event service, used to perform operations on that topic.

    @@ -501,12 +487,9 @@

    Methods

    Expand source code
    class Topic(object):
    -<<<<<<< refs/remotes/origin/main
    -    """Represents event topic metadata."""
    -=======
         """A reference to a topic on an event service, used to perform operations on that topic."""
     
    -    _stub: EventStub
    +    _events: Events
         name: str
     
         async def publish(
    @@ -525,10 +508,12 @@ 

    Methods

    if isinstance(event, dict): # TODO: handle events that are just a payload event = Event(**event) ->>>>>>> feat: port faas.start to bi-di streaming with membrane - response = await self._stub.publish(topic=self.name, event=_event_to_wire(event)) - return Event(**{**event.__dict__.copy(), **{"id": response.id}})
    + try: + response = await self._events._stub.publish(topic=self.name, event=_event_to_wire(event)) + return Event(**{**event.__dict__.copy(), **{"id": response.id}}) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)

    Class variables

    @@ -540,7 +525,7 @@

    Class variables

    Methods

    -async def publish(self, event: Union[Event, dict] = None) ‑> Event +async def publish(self, event: Union[Event, dict] = None) ‑> Event

    Publish an event/message to a topic, which can be subscribed to by other services.

    @@ -567,8 +552,11 @@

    Methods

    # TODO: handle events that are just a payload event = Event(**event) - response = await self._stub.publish(topic=self.name, event=_event_to_wire(event)) - return Event(**{**event.__dict__.copy(), **{"id": response.id}})
    + try: + response = await self._events._stub.publish(topic=self.name, event=_event_to_wire(event)) + return Event(**{**event.__dict__.copy(), **{"id": response.id}}) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)
    @@ -589,16 +577,24 @@

    Index

  • Sub-modules

  • Classes

    • +

      Documents

      + +
    • +
    • Event

    • -

      EventClient

      +

      Events

    • FailedTask

    • -

      KeyValueClient

      +

      Queues

    • -

      QueueClient

      +

      Secrets

    • -

      StorageClient

      +

      Storage

    • Task

      diff --git a/docs/nitric/api/kv.html b/docs/nitric/api/kv.html deleted file mode 100644 index 57e530b..0000000 --- a/docs/nitric/api/kv.html +++ /dev/null @@ -1,265 +0,0 @@ - - - - - - -nitric.api.kv API documentation - - - - - - - - - - - -
      -
      -
      -

      Module nitric.api.kv

      -
      -
      -
      - -Expand source code - -
      #
      -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
      -#
      -# This file is part of Nitric Python 3 SDK.
      -# See https://github.com/nitrictech/python-sdk for further info.
      -#
      -# Licensed under the Apache License, Version 2.0 (the "License");
      -# you may not use this file except in compliance with the License.
      -# You may obtain a copy of the License at
      -#
      -#     http://www.apache.org/licenses/LICENSE-2.0
      -#
      -# Unless required by applicable law or agreed to in writing, software
      -# distributed under the License is distributed on an "AS IS" BASIS,
      -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -# See the License for the specific language governing permissions and
      -# limitations under the License.
      -#
      -from nitric.utils import new_default_channel, _struct_from_dict
      -from nitric.proto.nitric.kv.v1 import KeyValueStub
      -
      -
      -class KeyValueClient(object):
      -    """
      -    Nitric generic document store/db client.
      -
      -    This client insulates application code from stack specific document CRUD operations or SDKs.
      -    """
      -
      -    def __init__(self, collection: str):
      -        """
      -        Construct a new DocumentClient.
      -
      -        :param collection: name of the key/value collection
      -        """
      -        self.collection = collection
      -        self._stub = KeyValueStub(channel=new_default_channel())
      -
      -    async def put(self, key: str, value: dict):
      -        """Create a new document with the specified key."""
      -        await self._stub.put(collection=self.collection, key=key, value=_struct_from_dict(value))
      -
      -    async def get(self, key: str) -> dict:
      -        """Retrieve a document from the specified key."""
      -        response = await self._stub.get(collection=self.collection, key=key)
      -        return response.value.to_dict()
      -
      -    async def delete(self, key: str):
      -        """Delete the specified document from the collection."""
      -        await self._stub.delete(collection=self.collection, key=key)
      -
      -
      -
      -
      -
      -
      -
      -
      -
      -

      Classes

      -
      -
      -class KeyValueClient -(collection: str) -
      -
      -

      Nitric generic document store/db client.

      -

      This client insulates application code from stack specific document CRUD operations or SDKs.

      -

      Construct a new DocumentClient.

      -

      :param collection: name of the key/value collection

      -
      - -Expand source code - -
      class KeyValueClient(object):
      -    """
      -    Nitric generic document store/db client.
      -
      -    This client insulates application code from stack specific document CRUD operations or SDKs.
      -    """
      -
      -    def __init__(self, collection: str):
      -        """
      -        Construct a new DocumentClient.
      -
      -        :param collection: name of the key/value collection
      -        """
      -        self.collection = collection
      -        self._stub = KeyValueStub(channel=new_default_channel())
      -
      -    async def put(self, key: str, value: dict):
      -        """Create a new document with the specified key."""
      -        await self._stub.put(collection=self.collection, key=key, value=_struct_from_dict(value))
      -
      -    async def get(self, key: str) -> dict:
      -        """Retrieve a document from the specified key."""
      -        response = await self._stub.get(collection=self.collection, key=key)
      -        return response.value.to_dict()
      -
      -    async def delete(self, key: str):
      -        """Delete the specified document from the collection."""
      -        await self._stub.delete(collection=self.collection, key=key)
      -
      -

      Methods

      -
      -
      -async def delete(self, key: str) -
      -
      -

      Delete the specified document from the collection.

      -
      - -Expand source code - -
      async def delete(self, key: str):
      -    """Delete the specified document from the collection."""
      -    await self._stub.delete(collection=self.collection, key=key)
      -
      -
      -
      -async def get(self, key: str) ‑> dict -
      -
      -

      Retrieve a document from the specified key.

      -
      - -Expand source code - -
      async def get(self, key: str) -> dict:
      -    """Retrieve a document from the specified key."""
      -    response = await self._stub.get(collection=self.collection, key=key)
      -    return response.value.to_dict()
      -
      -
      -
      -<<<<<<< refs/remotes/origin/main -def put(self, collection: str, key: str, value: dict) -
      -
      -

      Create a new document with the specified key in the specified collection.

      -======= -async def put(self, key: str, value: dict) - -
      -

      Create a new document with the specified key.

      ->>>>>>> feat: port faas.start to bi-di streaming with membrane -
      - -Expand source code - -<<<<<<< refs/remotes/origin/main -
      def put(self, collection: str, key: str, value: dict):
      -    """Create a new document with the specified key in the specified collection."""
      -    value_struct = Struct()
      -    value_struct.update(value)
      -    request = key_value.KeyValuePutRequest(collection=collection, key=key, value=value_struct)
      -    return self._exec("Put", request)
      -
      -
      -
      -
      -
      -class KeyValueGetResponse -(*args, **kwargs) -
      -
      -

      A ProtocolMessage

      -

      Ancestors

      -
        -
      • google.protobuf.pyext._message.CMessage
      • -
      • google.protobuf.message.Message
      • -
      -

      Class variables

      -
      -
      var DESCRIPTOR
      -
      -
      -
      -
      -

      Instance variables

      -
      -
      var value
      -
      -

      Field nitric.kv.v1.KeyValueGetResponse.value

      -======= -
      async def put(self, key: str, value: dict):
      -    """Create a new document with the specified key."""
      -    await self._stub.put(collection=self.collection, key=key, value=_struct_from_dict(value))
      - ->>>>>>> feat: port faas.start to bi-di streaming with membrane -
      -
      -
      -
      -
      -
      - -
      - - - \ No newline at end of file diff --git a/docs/nitric/api/queues.html b/docs/nitric/api/queues.html index 0eb1f2a..5f0fd44 100644 --- a/docs/nitric/api/queues.html +++ b/docs/nitric/api/queues.html @@ -44,40 +44,58 @@

      Module nitric.api.queues

      # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import List, Union -from nitric.utils import new_default_channel, _struct_from_dict -from nitric.proto.nitric.queue.v1 import QueueStub, NitricTask, FailedTask as WireFailedTask + +from grpclib import GRPCError + +from nitric.api.exception import FailedPreconditionException, exception_from_grpc_error, InvalidArgumentException +from nitric.utils import new_default_channel, _struct_from_dict, _dict_from_struct +from nitricapi.nitric.queue.v1 import QueueServiceStub, NitricTask, FailedTask as WireFailedTask from dataclasses import dataclass, field @dataclass(frozen=True, order=True) class Task(object): - """Represents a NitricTask.""" + """A task to be sent to a Queue.""" + + id: str = field(default=None) + payload_type: str = field(default=None) + payload: dict = field(default_factory=dict) + + +@dataclass(frozen=True, order=True) +class ReceivedTask(object): + """A reference to a task received from a Queue, with a lease.""" id: str = field(default=None) payload_type: str = field(default=None) payload: dict = field(default_factory=dict) lease_id: str = field(default=None) - _queue_stub: QueueStub = field(default=None) - _queue: str = field(default=None) + _queueing: Queues = field(default=None) + _queue: Queue = field(default=None) async def complete(self): - """Mark this task as complete and remove it from the queue.""" - if self._queue_stub is None or self._queue is None or self._queue == "": - raise Exception("Task was not created via Queue.") - if self.lease_id is None: - raise Exception( - "No lease_id available for task. Tasks must be received using Queue.receive to have a " - "valid lease_id." + """ + Mark this task as complete and remove it from the queue. + + Only callable for tasks that have been received from a Queue. + """ + if self._queueing is None or self._queue is None or self.lease_id is None: + raise FailedPreconditionException( + "Task is missing internal client or lease id, was it returned from " "queue.receive?" ) - await self._queue_stub.complete(queue=self._queue, lease_id=self.lease_id) + try: + await self._queueing._queue_stub.complete(queue=self._queue.name, lease_id=self.lease_id) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) @dataclass(frozen=True, order=True) class FailedTask(Task): - """Represents a failed queue publish for an event.""" + """Represents a failed queue publish.""" - lease_id: str = None # failed tasks should never have a lease id. message: str = field(default="") @@ -95,18 +113,20 @@

      Module nitric.api.queues

      ) -def _wire_to_task(task: NitricTask) -> Task: +def _wire_to_received_task(task: NitricTask, queueing: Queues = None, queue: Queue = None) -> ReceivedTask: """ Convert a Nitric Queue Task (protobuf) to a Nitric Task (python SDK). :param task: to convert :return: converted task """ - return Task( + return ReceivedTask( id=task.id, payload_type=task.payload_type, - payload=task.payload.to_dict(), + payload=_dict_from_struct(task.payload), lease_id=task.lease_id, + _queueing=queueing, + _queue=queue, ) @@ -117,13 +137,12 @@

      Module nitric.api.queues

      :param failed_task: the failed task :return: the Failed Task with failure message """ - task = _wire_to_task(failed_task.task) + task = _wire_to_received_task(failed_task.task) return FailedTask( id=task.id, payload_type=task.payload_type, payload=task.payload, - lease_id=task.lease_id, message=failed_task.message, ) @@ -132,7 +151,7 @@

      Module nitric.api.queues

      class Queue(object): """A reference to a queue from a queue service, used to perform operations on that queue.""" - _queue_stub: QueueStub + _queueing: Queues name: str async def send( @@ -157,11 +176,12 @@

      Module nitric.api.queues

      # TODO: handle tasks that are just a payload task = Task(**task) - await self._queue_stub.send(queue=self.name, task=_task_to_wire(task)) + try: + await self._queueing._queue_stub.send(queue=self.name, task=_task_to_wire(task)) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) - async def _send_batch( - self, tasks: List[Union[Task, dict]] = None, raise_on_failure: bool = True - ) -> List[FailedTask]: + async def _send_batch(self, tasks: List[Union[Task, dict]], raise_on_failure: bool = True) -> List[FailedTask]: """ Push a collection of tasks to a queue, which can be retrieved by other services. @@ -169,14 +189,16 @@

      Module nitric.api.queues

      :param raise_on_failure: Whether to raise an exception when one or more tasks fails to send :return: PushResponse containing a list containing details of any messages that failed to publish. """ - if tasks is None: - tasks = [] + if tasks is None or len(tasks) < 1: + raise InvalidArgumentException("No tasks provided, nothing to send.") wire_tasks = [_task_to_wire(Task(**task) if isinstance(task, dict) else task) for task in tasks] - response = await self._queue_stub.send_batch(queue=self.name, tasks=wire_tasks) - - return [_wire_to_failed_task(failed_task) for failed_task in response.failed_tasks] + try: + response = await self._queueing._queue_stub.send_batch(queue=self.name, tasks=wire_tasks) + return [_wire_to_failed_task(failed_task) for failed_task in response.failed_tasks] + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) async def receive(self, limit: int = None) -> List[Task]: """ @@ -195,26 +217,30 @@

      Module nitric.api.queues

      if limit is None or limit < 1: limit = 1 - response = await self._queue_stub.receive(queue=self.name, depth=limit) - - # Map the response protobuf response items to Python SDK Nitric Tasks - return [_wire_to_task(task) for task in response.tasks] + try: + response = await self._queueing._queue_stub.receive(queue=self.name, depth=limit) + # Map the response protobuf response items to Python SDK Nitric Tasks + return [_wire_to_received_task(task=task, queueing=self._queueing, queue=self) for task in response.tasks] + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) -class QueueClient(object): - """ - Nitric generic publish/subscribe tasking client. - - This client insulates application code from stack specific task/topic operations or SDKs. - """ +class Queues(object): + """Queueing client, providing access to Queue and Task references and operations on those entities.""" def __init__(self): """Construct a Nitric Queue Client.""" - self._queue_stub = QueueStub(channel=new_default_channel()) + self.channel = new_default_channel() + self._queue_stub = QueueServiceStub(channel=self.channel) + + def __del__(self): + # close the channel when this client is destroyed + if self.channel is not None: + self.channel.close() def queue(self, name: str): """Return a reference to a queue from the connected queue service.""" - return Queue(_queue_stub=self._queue_stub, name=name) + return Queue(_queueing=self, name=name)
  • @@ -228,18 +254,17 @@

    Classes

    class FailedTask -(id: str = None, payload_type: str = None, payload: dict = <factory>, lease_id: str = None, message: str = '') +(id: str = None, payload_type: str = None, payload: dict = <factory>, message: str = '')
    -

    Represents a failed queue publish for an event.

    +

    Represents a failed queue publish.

    Expand source code
    class FailedTask(Task):
    -    """Represents a failed queue publish for an event."""
    +    """Represents a failed queue publish."""
     
    -    lease_id: str = None  # failed tasks should never have a lease id.
         message: str = field(default="")

    Ancestors

    @@ -248,27 +273,15 @@

    Ancestors

    Class variables

    -
    var lease_id : str
    -
    -
    -
    var message : str
    -

    Inherited members

    -
    class Queue -(_queue_stub: QueueStub, name: str) +(_queueing: Queues, name: str)

    A reference to a queue from a queue service, used to perform operations on that queue.

    @@ -279,7 +292,7 @@

    Inherited members

    class Queue(object):
         """A reference to a queue from a queue service, used to perform operations on that queue."""
     
    -    _queue_stub: QueueStub
    +    _queueing: Queues
         name: str
     
         async def send(
    @@ -304,11 +317,12 @@ 

    Inherited members

    # TODO: handle tasks that are just a payload task = Task(**task) - await self._queue_stub.send(queue=self.name, task=_task_to_wire(task)) + try: + await self._queueing._queue_stub.send(queue=self.name, task=_task_to_wire(task)) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) - async def _send_batch( - self, tasks: List[Union[Task, dict]] = None, raise_on_failure: bool = True - ) -> List[FailedTask]: + async def _send_batch(self, tasks: List[Union[Task, dict]], raise_on_failure: bool = True) -> List[FailedTask]: """ Push a collection of tasks to a queue, which can be retrieved by other services. @@ -316,14 +330,16 @@

    Inherited members

    :param raise_on_failure: Whether to raise an exception when one or more tasks fails to send :return: PushResponse containing a list containing details of any messages that failed to publish. """ - if tasks is None: - tasks = [] + if tasks is None or len(tasks) < 1: + raise InvalidArgumentException("No tasks provided, nothing to send.") wire_tasks = [_task_to_wire(Task(**task) if isinstance(task, dict) else task) for task in tasks] - response = await self._queue_stub.send_batch(queue=self.name, tasks=wire_tasks) - - return [_wire_to_failed_task(failed_task) for failed_task in response.failed_tasks] + try: + response = await self._queueing._queue_stub.send_batch(queue=self.name, tasks=wire_tasks) + return [_wire_to_failed_task(failed_task) for failed_task in response.failed_tasks] + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) async def receive(self, limit: int = None) -> List[Task]: """ @@ -342,10 +358,12 @@

    Inherited members

    if limit is None or limit < 1: limit = 1 - response = await self._queue_stub.receive(queue=self.name, depth=limit) - - # Map the response protobuf response items to Python SDK Nitric Tasks - return [_wire_to_task(task) for task in response.tasks]
    + try: + response = await self._queueing._queue_stub.receive(queue=self.name, depth=limit) + # Map the response protobuf response items to Python SDK Nitric Tasks + return [_wire_to_received_task(task=task, queueing=self._queueing, queue=self) for task in response.tasks] + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)

    Class variables

    @@ -388,10 +406,12 @@

    Methods

    if limit is None or limit < 1: limit = 1 - response = await self._queue_stub.receive(queue=self.name, depth=limit) - - # Map the response protobuf response items to Python SDK Nitric Tasks - return [_wire_to_task(task) for task in response.tasks] + try: + response = await self._queueing._queue_stub.receive(queue=self.name, depth=limit) + # Map the response protobuf response items to Python SDK Nitric Tasks + return [_wire_to_received_task(task=task, queueing=self._queueing, queue=self) for task in response.tasks] + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)
    @@ -428,40 +448,44 @@

    Methods

    # TODO: handle tasks that are just a payload task = Task(**task) - await self._queue_stub.send(queue=self.name, task=_task_to_wire(task))
    + try: + await self._queueing._queue_stub.send(queue=self.name, task=_task_to_wire(task)) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)
    -
    -class QueueClient +
    +class Queues
    -

    Nitric generic publish/subscribe tasking client.

    -

    This client insulates application code from stack specific task/topic operations or SDKs.

    +

    Queueing client, providing access to Queue and Task references and operations on those entities.

    Construct a Nitric Queue Client.

    Expand source code -
    class QueueClient(object):
    -    """
    -    Nitric generic publish/subscribe tasking client.
    -
    -    This client insulates application code from stack specific task/topic operations or SDKs.
    -    """
    +
    class Queues(object):
    +    """Queueing client, providing access to Queue and Task references and operations on those entities."""
     
         def __init__(self):
             """Construct a Nitric Queue Client."""
    -        self._queue_stub = QueueStub(channel=new_default_channel())
    +        self.channel = new_default_channel()
    +        self._queue_stub = QueueServiceStub(channel=self.channel)
    +
    +    def __del__(self):
    +        # close the channel when this client is destroyed
    +        if self.channel is not None:
    +            self.channel.close()
     
         def queue(self, name: str):
             """Return a reference to a queue from the connected queue service."""
    -        return Queue(_queue_stub=self._queue_stub, name=name)
    + return Queue(_queueing=self, name=name)

    Methods

    -
    +
    def queue(self, name: str)
    @@ -472,90 +496,132 @@

    Methods

    def queue(self, name: str):
         """Return a reference to a queue from the connected queue service."""
    -    return Queue(_queue_stub=self._queue_stub, name=name)
    + return Queue(_queueing=self, name=name)
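For illustration only, a minimal sketch of obtaining a queue reference and sending a task with this client (the queue name "work" and the payload are hypothetical; a running Nitric Membrane reachable over the default channel is assumed):

import asyncio
from nitric.api.queues import Queues, Task

async def send_example():
    # get a reference to a queue and push a single task onto it
    queue = Queues().queue("work")
    await queue.send(Task(payload={"message": "hello"}))

asyncio.run(send_example())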
    -
    -class Task +
    +class ReceivedTask (id: str = None, payload_type: str = None, payload: dict = <factory>, lease_id: str = None)
    -

    Represents a NitricTask.

    +

    A reference to a task received from a Queue, with a lease.

    Expand source code -
    class Task(object):
    -    """Represents a NitricTask."""
    +
    class ReceivedTask(object):
    +    """A reference to a task received from a Queue, with a lease."""
     
         id: str = field(default=None)
         payload_type: str = field(default=None)
         payload: dict = field(default_factory=dict)
         lease_id: str = field(default=None)
    -    _queue_stub: QueueStub = field(default=None)
    -    _queue: str = field(default=None)
    +    _queueing: Queues = field(default=None)
    +    _queue: Queue = field(default=None)
     
         async def complete(self):
    -        """Mark this task as complete and remove it from the queue."""
    -        if self._queue_stub is None or self._queue is None or self._queue == "":
    -            raise Exception("Task was not created via Queue.")
    -        if self.lease_id is None:
    -            raise Exception(
    -                "No lease_id available for task. Tasks must be received using Queue.receive to have a "
    -                "valid lease_id."
    +        """
    +        Mark this task as complete and remove it from the queue.
    +
    +        Only callable for tasks that have been received from a Queue.
    +        """
    +        if self._queueing is None or self._queue is None or self.lease_id is None:
    +            raise FailedPreconditionException(
    +                "Task is missing internal client or lease id, was it returned from " "queue.receive?"
                 )
    -        await self._queue_stub.complete(queue=self._queue, lease_id=self.lease_id)
    + try: + await self._queueing._queue_stub.complete(queue=self._queue.name, lease_id=self.lease_id) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)
    -

    Subclasses

    -

    Class variables

    -
    var id : str
    +
    var id : str
    -
    var lease_id : str
    +
    var lease_id : str
    -
    var payload : dict
    +
    var payload : dict
    -
    var payload_type : str
    +
    var payload_type : str

    Methods

    -
    +
    async def complete(self)
    -

    Mark this task as complete and remove it from the queue.

    +

    Mark this task as complete and remove it from the queue.

    +

    Only callable for tasks that have been received from a Queue.

    Expand source code
    async def complete(self):
    -    """Mark this task as complete and remove it from the queue."""
    -    if self._queue_stub is None or self._queue is None or self._queue == "":
    -        raise Exception("Task was not created via Queue.")
    -    if self.lease_id is None:
    -        raise Exception(
    -            "No lease_id available for task. Tasks must be received using Queue.receive to have a "
    -            "valid lease_id."
    +    """
    +    Mark this task as complete and remove it from the queue.
    +
    +    Only callable for tasks that have been received from a Queue.
    +    """
    +    if self._queueing is None or self._queue is None or self.lease_id is None:
    +        raise FailedPreconditionException(
    +            "Task is missing internal client or lease id, was it returned from " "queue.receive?"
             )
    -    await self._queue_stub.complete(queue=self._queue, lease_id=self.lease_id)
    + try: + await self._queueing._queue_stub.complete(queue=self._queue.name, lease_id=self.lease_id) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)
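To illustrate the receive/complete flow described above, a hedged sketch (the queue name "work" is hypothetical; a running Membrane is assumed):

import asyncio
from nitric.api.queues import Queues

async def process_once():
    queue = Queues().queue("work")
    # lease up to 5 tasks from the queue
    tasks = await queue.receive(limit=5)
    for task in tasks:
        # handle the payload, then remove the task from the queue
        print(task.payload)
        await task.complete()

asyncio.run(process_once())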
    +
    +class Task +(id: str = None, payload_type: str = None, payload: dict = <factory>) +
    +
    +

    A task to be sent to a Queue.

    +
    + +Expand source code + +
    class Task(object):
    +    """A task to be sent to a Queue."""
    +
    +    id: str = field(default=None)
    +    payload_type: str = field(default=None)
    +    payload: dict = field(default_factory=dict)
    +
    +

    Subclasses

    + +

    Class variables

    +
    +
    var id : str
    +
    +
    +
    +
    var payload : dict
    +
    +
    +
    +
    var payload_type : str
    +
    +
    +
    +
    +
    @@ -575,7 +641,6 @@

    Index

  • FailedTask

  • @@ -588,17 +653,25 @@

    Que
  • -

    QueueClient

    +

    Queues

    + +
  • +
  • +

    ReceivedTask

  • Task

    diff --git a/docs/nitric/api/secrets.html b/docs/nitric/api/secrets.html new file mode 100644 index 0000000..4efa7ac --- /dev/null +++ b/docs/nitric/api/secrets.html @@ -0,0 +1,580 @@ + + + + + + +nitric.api.secrets API documentation + + + + + + + + + + + +
    +
    +
    +

    Module nitric.api.secrets

    +
    +
    +
    + +Expand source code + +
    #
    +# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    +#
    +# This file is part of Nitric Python 3 SDK.
    +# See https://github.com/nitrictech/python-sdk for further info.
    +#
    +# Licensed under the Apache License, Version 2.0 (the "License");
    +# you may not use this file except in compliance with the License.
    +# You may obtain a copy of the License at
    +#
    +#     http://www.apache.org/licenses/LICENSE-2.0
    +#
    +# Unless required by applicable law or agreed to in writing, software
    +# distributed under the License is distributed on an "AS IS" BASIS,
    +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    +# See the License for the specific language governing permissions and
    +# limitations under the License.
    +#
    +from __future__ import annotations
    +from dataclasses import dataclass
    +from typing import Union
    +
    +from grpclib import GRPCError
    +
    +from nitric.api.exception import exception_from_grpc_error
    +from nitric.utils import new_default_channel
    +from nitricapi.nitric.secret.v1 import SecretServiceStub, Secret as SecretMessage, SecretVersion as VersionMessage
    +
    +
    +class Secrets(object):
    +    """
    +    Nitric secrets management client.
    +
+    This client insulates application code from stack specific secrets management services.
    +    """
    +
    +    def __init__(self):
    +        """Construct a Nitric Storage Client."""
    +        self._channel = new_default_channel()
    +        self._secrets_stub = SecretServiceStub(channel=self._channel)
    +
    +    def __del__(self):
    +        # close the channel when this client is destroyed
    +        if self._channel is not None:
    +            self._channel.close()
    +
    +    def secret(self, name: str):
    +        """Return a reference to a secret container from the connected secrets management service."""
    +        return SecretContainer(_secrets=self, name=name)
    +
    +
    +def _secret_to_wire(secret: SecretContainer) -> SecretMessage:
    +    return SecretMessage(name=secret.name)
    +
    +
    +@dataclass(frozen=True)
    +class SecretContainer(object):
    +    """A reference to a secret container, used to store and retrieve secret versions."""
    +
    +    _secrets: Secrets
    +    name: str
    +
    +    async def put(self, value: Union[str, bytes]) -> SecretVersion:
    +        """
    +        Create a new secret version, making it the latest and storing the provided value.
    +
    +        :param value: the secret value to store
    +        """
    +        if isinstance(value, str):
    +            value = bytes(value, "utf-8")
    +
    +        secret_message = _secret_to_wire(self)
    +
    +        try:
    +            response = await self._secrets._secrets_stub.put(secret=secret_message, value=value)
    +            return self.version(version=response.secret_version.version)
    +        except GRPCError as grpc_err:
    +            raise exception_from_grpc_error(grpc_err)
    +
    +    def version(self, version: str):
    +        """
    +        Return a reference to a specific version of a secret.
    +
    +        Can be used to retrieve the secret value associated with the version.
    +        """
    +        return SecretVersion(_secrets=self._secrets, secret=self, id=version)
    +
    +    def latest(self):
    +        """
    +        Return a reference to the 'latest' secret version.
    +
    +        Note: using 'access' on this reference may return different values between requests if a
    +        new version is created between access calls.
    +        """
    +        return self.version("latest")
    +
    +
    +def _secret_version_to_wire(version: SecretVersion) -> VersionMessage:
    +    return VersionMessage(_secret_to_wire(version.secret), version=version.id)
    +
    +
    +@dataclass(frozen=True)
    +class SecretVersion(object):
    +    """A reference to a version of a secret, used to access the value of the version."""
    +
    +    _secrets: Secrets
    +    secret: SecretContainer
    +    id: str
    +
    +    async def access(self) -> SecretValue:
    +        """Return the value stored against this version of the secret."""
    +        version_message = _secret_version_to_wire(self)
    +        try:
    +            response = await self._secrets._secrets_stub.access(secret_version=version_message)
    +        except GRPCError as grpc_err:
    +            raise exception_from_grpc_error(grpc_err)
    +
    +        # Construct a new SecretVersion if the response version id doesn't match this reference.
    +        # This ensures calls to access from the 'latest' version return new version objects
    +        # with a fixed version id.
    +        static_version = (
    +            self
    +            if response.secret_version.version == self.id
    +            else SecretVersion(_secrets=self._secrets, secret=self.secret, id=response.secret_version.version)
    +        )
    +
    +        return SecretValue(version=static_version, value=response.value)
    +
    +
    +@dataclass(frozen=True)
    +class SecretValue(object):
    +    """Represents the value of a secret, tied to a specific version."""
    +
    +    # The version containing this value. Never 'latest', always a specific version.
    +    version: SecretVersion
    +    value: bytes
    +
    +    def __str__(self) -> str:
    +        return self.value.decode("utf-8")
    +
    +    def __bytes__(self) -> bytes:
    +        return self.value
    +
    +    def as_string(self):
    +        """Return the content of this secret value as a string."""
    +        return str(self)
    +
    +    def as_bytes(self):
    +        """Return the content of this secret value."""
    +        return bytes(self)
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
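As an illustrative sketch of the client above (the secret name "api-key" and the stored value are hypothetical; a running Membrane is assumed):

import asyncio
from nitric.api.secrets import Secrets

async def store_and_read():
    secret = Secrets().secret("api-key")
    # store a new version, making it the latest
    await secret.put("super-secret-value")
    # read the latest value back
    value = await secret.latest().access()
    print(value.as_string())

asyncio.run(store_and_read())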

    Classes

    +
    +
    +class SecretContainer +(_secrets: Secrets, name: str) +
    +
    +

    A reference to a secret container, used to store and retrieve secret versions.

    +
    + +Expand source code + +
    class SecretContainer(object):
    +    """A reference to a secret container, used to store and retrieve secret versions."""
    +
    +    _secrets: Secrets
    +    name: str
    +
    +    async def put(self, value: Union[str, bytes]) -> SecretVersion:
    +        """
    +        Create a new secret version, making it the latest and storing the provided value.
    +
    +        :param value: the secret value to store
    +        """
    +        if isinstance(value, str):
    +            value = bytes(value, "utf-8")
    +
    +        secret_message = _secret_to_wire(self)
    +
    +        try:
    +            response = await self._secrets._secrets_stub.put(secret=secret_message, value=value)
    +            return self.version(version=response.secret_version.version)
    +        except GRPCError as grpc_err:
    +            raise exception_from_grpc_error(grpc_err)
    +
    +    def version(self, version: str):
    +        """
    +        Return a reference to a specific version of a secret.
    +
    +        Can be used to retrieve the secret value associated with the version.
    +        """
    +        return SecretVersion(_secrets=self._secrets, secret=self, id=version)
    +
    +    def latest(self):
    +        """
    +        Return a reference to the 'latest' secret version.
    +
    +        Note: using 'access' on this reference may return different values between requests if a
    +        new version is created between access calls.
    +        """
    +        return self.version("latest")
    +
    +

    Class variables

    +
    +
    var name : str
    +
    +
    +
    +
    +

    Methods

    +
    +
    +def latest(self) +
    +
    +

    Return a reference to the 'latest' secret version.

    +

    Note: using 'access' on this reference may return different values between requests if a +new version is created between access calls.

    +
    + +Expand source code + +
    def latest(self):
    +    """
    +    Return a reference to the 'latest' secret version.
    +
    +    Note: using 'access' on this reference may return different values between requests if a
    +    new version is created between access calls.
    +    """
    +    return self.version("latest")
    +
    +
    +
    +async def put(self, value: Union[str, bytes]) ‑> SecretVersion +
    +
    +

    Create a new secret version, making it the latest and storing the provided value.

    +

    :param value: the secret value to store

    +
    + +Expand source code + +
    async def put(self, value: Union[str, bytes]) -> SecretVersion:
    +    """
    +    Create a new secret version, making it the latest and storing the provided value.
    +
    +    :param value: the secret value to store
    +    """
    +    if isinstance(value, str):
    +        value = bytes(value, "utf-8")
    +
    +    secret_message = _secret_to_wire(self)
    +
    +    try:
    +        response = await self._secrets._secrets_stub.put(secret=secret_message, value=value)
    +        return self.version(version=response.secret_version.version)
    +    except GRPCError as grpc_err:
    +        raise exception_from_grpc_error(grpc_err)
    +
    +
    +
    +def version(self, version: str) +
    +
    +

    Return a reference to a specific version of a secret.

    +

    Can be used to retrieve the secret value associated with the version.

    +
    + +Expand source code + +
    def version(self, version: str):
    +    """
    +    Return a reference to a specific version of a secret.
    +
    +    Can be used to retrieve the secret value associated with the version.
    +    """
    +    return SecretVersion(_secrets=self._secrets, secret=self, id=version)
    +
    +
    +
    +
    +
    +class SecretValue +(version: SecretVersion, value: bytes) +
    +
    +

    Represents the value of a secret, tied to a specific version.

    +
    + +Expand source code + +
    class SecretValue(object):
    +    """Represents the value of a secret, tied to a specific version."""
    +
    +    # The version containing this value. Never 'latest', always a specific version.
    +    version: SecretVersion
    +    value: bytes
    +
    +    def __str__(self) -> str:
    +        return self.value.decode("utf-8")
    +
    +    def __bytes__(self) -> bytes:
    +        return self.value
    +
    +    def as_string(self):
    +        """Return the content of this secret value as a string."""
    +        return str(self)
    +
    +    def as_bytes(self):
    +        """Return the content of this secret value."""
    +        return bytes(self)
    +
    +

    Class variables

    +
    +
    var value : bytes
    +
    +
    +
    +
    var versionSecretVersion
    +
    +
    +
    +
    +

    Methods

    +
    +
    +def as_bytes(self) +
    +
    +

    Return the content of this secret value.

    +
    + +Expand source code + +
    def as_bytes(self):
    +    """Return the content of this secret value."""
    +    return bytes(self)
    +
    +
    +
    +def as_string(self) +
    +
    +

    Return the content of this secret value as a string.

    +
    + +Expand source code + +
    def as_string(self):
    +    """Return the content of this secret value as a string."""
    +    return str(self)
    +
    +
    +
    +
    +
    +class SecretVersion +(_secrets: Secrets, secret: SecretContainer, id: str) +
    +
    +

    A reference to a version of a secret, used to access the value of the version.

    +
    + +Expand source code + +
    class SecretVersion(object):
    +    """A reference to a version of a secret, used to access the value of the version."""
    +
    +    _secrets: Secrets
    +    secret: SecretContainer
    +    id: str
    +
    +    async def access(self) -> SecretValue:
    +        """Return the value stored against this version of the secret."""
    +        version_message = _secret_version_to_wire(self)
    +        try:
    +            response = await self._secrets._secrets_stub.access(secret_version=version_message)
    +        except GRPCError as grpc_err:
    +            raise exception_from_grpc_error(grpc_err)
    +
    +        # Construct a new SecretVersion if the response version id doesn't match this reference.
    +        # This ensures calls to access from the 'latest' version return new version objects
    +        # with a fixed version id.
    +        static_version = (
    +            self
    +            if response.secret_version.version == self.id
    +            else SecretVersion(_secrets=self._secrets, secret=self.secret, id=response.secret_version.version)
    +        )
    +
    +        return SecretValue(version=static_version, value=response.value)
    +
    +

    Class variables

    +
    +
    var id : str
    +
    +
    +
    +
    var secretSecretContainer
    +
    +
    +
    +
    +

    Methods

    +
    +
    +async def access(self) ‑> SecretValue +
    +
    +

    Return the value stored against this version of the secret.

    +
    + +Expand source code + +
    async def access(self) -> SecretValue:
    +    """Return the value stored against this version of the secret."""
    +    version_message = _secret_version_to_wire(self)
    +    try:
    +        response = await self._secrets._secrets_stub.access(secret_version=version_message)
    +    except GRPCError as grpc_err:
    +        raise exception_from_grpc_error(grpc_err)
    +
    +    # Construct a new SecretVersion if the response version id doesn't match this reference.
    +    # This ensures calls to access from the 'latest' version return new version objects
    +    # with a fixed version id.
    +    static_version = (
    +        self
    +        if response.secret_version.version == self.id
    +        else SecretVersion(_secrets=self._secrets, secret=self.secret, id=response.secret_version.version)
    +    )
    +
    +    return SecretValue(version=static_version, value=response.value)
    +
    +
    +
    +
    +
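A small sketch of the version-pinning behaviour of access() noted above (names hypothetical; a running Membrane is assumed):

import asyncio
from nitric.api.secrets import Secrets

async def read_pinned():
    latest = Secrets().secret("api-key").latest()
    value = await latest.access()
    # the returned value is tied to a concrete version id, not 'latest'
    print(value.version.id)

asyncio.run(read_pinned())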
    +class Secrets +
    +
    +

    Nitric secrets management client.

    +

This client insulates application code from stack specific secrets management services.

    +

Construct a Nitric Secrets Client.

    +
    + +Expand source code + +
    class Secrets(object):
    +    """
    +    Nitric secrets management client.
    +
+    This client insulates application code from stack specific secrets management services.
    +    """
    +
    +    def __init__(self):
    +        """Construct a Nitric Storage Client."""
    +        self._channel = new_default_channel()
    +        self._secrets_stub = SecretServiceStub(channel=self._channel)
    +
    +    def __del__(self):
    +        # close the channel when this client is destroyed
    +        if self._channel is not None:
    +            self._channel.close()
    +
    +    def secret(self, name: str):
    +        """Return a reference to a secret container from the connected secrets management service."""
    +        return SecretContainer(_secrets=self, name=name)
    +
    +

    Methods

    +
    +
    +def secret(self, name: str) +
    +
    +

    Return a reference to a secret container from the connected secrets management service.

    +
    + +Expand source code + +
    def secret(self, name: str):
    +    """Return a reference to a secret container from the connected secrets management service."""
    +    return SecretContainer(_secrets=self, name=name)
    +
    +
    +
    +
    +
    +
    +
    + +
    + + + \ No newline at end of file diff --git a/docs/nitric/api/storage.html b/docs/nitric/api/storage.html index 4b78a51..40ec8d1 100644 --- a/docs/nitric/api/storage.html +++ b/docs/nitric/api/storage.html @@ -46,11 +46,14 @@

    Module nitric.api.storage

    # from dataclasses import dataclass +from grpclib import GRPCError + +from nitric.api.exception import exception_from_grpc_error from nitric.utils import new_default_channel -from nitric.proto.nitric.storage.v1 import StorageStub +from nitricapi.nitric.storage.v1 import StorageServiceStub -class StorageClient(object): +class Storage(object): """ Nitric generic blob storage client. @@ -59,30 +62,36 @@

    Module nitric.api.storage

    def __init__(self): """Construct a Nitric Storage Client.""" - self._storage_stub = StorageStub(channel=new_default_channel()) + self._channel = new_default_channel() + self._storage_stub = StorageServiceStub(channel=self._channel) + + def __del__(self): + # close the channel when this client is destroyed + if self._channel is not None: + self._channel.close() def bucket(self, name: str): """Return a reference to a bucket from the connected storage service.""" - return Bucket(_storage_stub=self._storage_stub, name=name) + return Bucket(_storage=self, name=name) @dataclass(frozen=True, order=True) class Bucket(object): """A reference to a bucket in a storage service, used to the perform operations on that bucket.""" - _storage_stub: StorageStub + _storage: Storage name: str def file(self, key: str): """Return a reference to a file in this bucket.""" - return File(_storage_stub=self._storage_stub, _bucket=self.name, key=key) + return File(_storage=self._storage, _bucket=self.name, key=key) @dataclass(frozen=True, order=True) class File(object): """A reference to a file in a bucket, used to perform operations on that file.""" - _storage_stub: StorageStub + _storage: Storage _bucket: str key: str @@ -92,16 +101,25 @@

    Module nitric.api.storage

    Will create the file if it doesn't already exist. """ - await self._storage_stub.write(bucket_name=self._bucket, key=self.key, body=body) + try: + await self._storage._storage_stub.write(bucket_name=self._bucket, key=self.key, body=body) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) async def read(self) -> bytes: """Read this files contents from the bucket.""" - response = await self._storage_stub.read(bucket_name=self._bucket, key=self.key) - return response.body + try: + response = await self._storage._storage_stub.read(bucket_name=self._bucket, key=self.key) + return response.body + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) async def delete(self): """Delete this file from the bucket.""" - await self._storage_stub.delete(bucket_name=self._bucket, key=self.key)
    + try: + await self._storage._storage_stub.delete(bucket_name=self._bucket, key=self.key) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)
    @@ -115,7 +133,7 @@

    Classes

    class Bucket -(_storage_stub: StorageStub, name: str) +(_storage: Storage, name: str)

A reference to a bucket in a storage service, used to perform operations on that bucket.

    @@ -126,12 +144,12 @@

    Classes

    class Bucket(object):
         """A reference to a bucket in a storage service, used to the perform operations on that bucket."""
     
    -    _storage_stub: StorageStub
    +    _storage: Storage
         name: str
     
         def file(self, key: str):
             """Return a reference to a file in this bucket."""
    -        return File(_storage_stub=self._storage_stub, _bucket=self.name, key=key)
    + return File(_storage=self._storage, _bucket=self.name, key=key)

    Class variables

    @@ -153,14 +171,14 @@

    Methods

    def file(self, key: str):
         """Return a reference to a file in this bucket."""
    -    return File(_storage_stub=self._storage_stub, _bucket=self.name, key=key)
    + return File(_storage=self._storage, _bucket=self.name, key=key)
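For context, a minimal sketch of working with bucket and file references (the bucket "images" and key "photo.png" are hypothetical; a running Membrane is assumed):

import asyncio
from nitric.api.storage import Storage

async def store_and_fetch():
    file = Storage().bucket("images").file("photo.png")
    await file.write(b"example bytes")
    data = await file.read()
    print(len(data))
    await file.delete()

asyncio.run(store_and_fetch())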
    class File -(_storage_stub: StorageStub, _bucket: str, key: str) +(_storage: Storage, _bucket: str, key: str)

    A reference to a file in a bucket, used to perform operations on that file.

    @@ -171,7 +189,7 @@

    Methods

    class File(object):
         """A reference to a file in a bucket, used to perform operations on that file."""
     
    -    _storage_stub: StorageStub
    +    _storage: Storage
         _bucket: str
         key: str
     
    @@ -181,16 +199,25 @@ 

    Methods

    Will create the file if it doesn't already exist. """ - await self._storage_stub.write(bucket_name=self._bucket, key=self.key, body=body) + try: + await self._storage._storage_stub.write(bucket_name=self._bucket, key=self.key, body=body) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) async def read(self) -> bytes: """Read this files contents from the bucket.""" - response = await self._storage_stub.read(bucket_name=self._bucket, key=self.key) - return response.body + try: + response = await self._storage._storage_stub.read(bucket_name=self._bucket, key=self.key) + return response.body + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) async def delete(self): """Delete this file from the bucket.""" - await self._storage_stub.delete(bucket_name=self._bucket, key=self.key)
    + try: + await self._storage._storage_stub.delete(bucket_name=self._bucket, key=self.key) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)

    Class variables

    @@ -212,7 +239,10 @@

    Methods

    async def delete(self):
         """Delete this file from the bucket."""
    -    await self._storage_stub.delete(bucket_name=self._bucket, key=self.key)
    + try: + await self._storage._storage_stub.delete(bucket_name=self._bucket, key=self.key) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)
    @@ -226,8 +256,11 @@

    Methods

    async def read(self) -> bytes:
         """Read this files contents from the bucket."""
    -    response = await self._storage_stub.read(bucket_name=self._bucket, key=self.key)
    -    return response.body
    + try: + response = await self._storage._storage_stub.read(bucket_name=self._bucket, key=self.key) + return response.body + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err)
    @@ -246,13 +279,16 @@

    Methods

    Will create the file if it doesn't already exist. """ - await self._storage_stub.write(bucket_name=self._bucket, key=self.key, body=body)
    + try: + await self._storage._storage_stub.write(bucket_name=self._bucket, key=self.key, body=body) + except GRPCError as grpc_err: + raise exception_from_grpc_error(grpc_err) -
    -class StorageClient +
    +class Storage

    Nitric generic blob storage client.

    @@ -262,7 +298,7 @@

    Methods

    Expand source code -
    class StorageClient(object):
    +
    class Storage(object):
         """
         Nitric generic blob storage client.
     
    @@ -271,15 +307,21 @@ 

    Methods

    def __init__(self): """Construct a Nitric Storage Client.""" - self._storage_stub = StorageStub(channel=new_default_channel()) + self._channel = new_default_channel() + self._storage_stub = StorageServiceStub(channel=self._channel) + + def __del__(self): + # close the channel when this client is destroyed + if self._channel is not None: + self._channel.close() def bucket(self, name: str): """Return a reference to a bucket from the connected storage service.""" - return Bucket(_storage_stub=self._storage_stub, name=name)
    + return Bucket(_storage=self, name=name)

    Methods

    -
    +
    def bucket(self, name: str)
    @@ -290,7 +332,7 @@

    Methods

    def bucket(self, name: str):
         """Return a reference to a bucket from the connected storage service."""
    -    return Bucket(_storage_stub=self._storage_stub, name=name)
    + return Bucket(_storage=self, name=name)
    @@ -328,9 +370,9 @@

    Fil

  • -

    StorageClient

    +

    Storage

  • diff --git a/docs/nitric/config/index.html b/docs/nitric/config/index.html index 72eff4f..9ec3869 100644 --- a/docs/nitric/config/index.html +++ b/docs/nitric/config/index.html @@ -61,9 +61,6 @@

    Module nitric.config

    env_variable = os.environ.get(setting) setattr(self, setting, env_variable or default_value) - # def __getattr__(self, name): - # return getattr(self, name) - settings = Settings()
    diff --git a/docs/nitric/faas.html b/docs/nitric/faas.html new file mode 100644 index 0000000..88b54c1 --- /dev/null +++ b/docs/nitric/faas.html @@ -0,0 +1,1278 @@ + + + + + + +nitric.faas API documentation + + + + + + + + + + + +
    +
    +
    +

    Module nitric.faas

    +
    +
    +
    + +Expand source code + +
    from __future__ import annotations
    +
    +import functools
    +import json
    +import traceback
    +from typing import Dict, Union, List, TypeVar, Callable, Coroutine, Any
    +
    +import betterproto
    +from betterproto.grpc.util.async_channel import AsyncChannel
    +from nitric.utils import new_default_channel
    +from nitricapi.nitric.faas.v1 import (
    +    FaasServiceStub,
    +    InitRequest,
    +    ClientMessage,
    +    TriggerRequest,
    +    TriggerResponse,
    +    HttpResponseContext,
    +    TopicResponseContext,
    +)
    +import asyncio
    +from abc import ABC
    +
    +Record = Dict[str, Union[str, List[str]]]
    +
    +
    +class Request(ABC):
    +    """Represents an abstract trigger request."""
    +
    +    def __init__(self, data: bytes):
    +        """Construct a new Request."""
    +        self.data = data
    +
    +
    +class Response(ABC):
    +    """Represents an abstract trigger response."""
    +
    +    pass
    +
    +
    +class TriggerContext(ABC):
    +    """Represents an abstract request/response context for any trigger."""
    +
    +    def http(self) -> Union[HttpContext, None]:
    +        """Return this context as an HttpContext if it is one, otherwise returns None."""
    +        return None
    +
    +    def event(self) -> Union[EventContext, None]:
    +        """Return this context as an EventContext if it is one, otherwise returns None."""
    +        return None
    +
    +
    +def _ctx_from_grpc_trigger_request(trigger_request: TriggerRequest):
    +    """Return a TriggerContext from a TriggerRequest."""
    +    context_type, context = betterproto.which_one_of(trigger_request, "context")
    +    if context_type == "http":
    +        return HttpContext.from_grpc_trigger_request(trigger_request)
    +    elif context_type == "topic":
    +        return EventContext.from_grpc_trigger_request(trigger_request)
    +    else:
    +        print(f"Trigger with unknown context received, context type: {context_type}")
    +        raise Exception(f"Unknown trigger context, type: {context_type}")
    +
    +
    +def _grpc_response_from_ctx(ctx: TriggerContext) -> TriggerResponse:
    +    """
    +    Create a GRPC TriggerResponse from a TriggerContext.
    +
    +    The ctx is used to determine the appropriate TriggerResponse content,
    +    the ctx.res is then used to construct the response.
    +    """
    +    if ctx.http():
    +        ctx = ctx.http()
    +        return TriggerResponse(
    +            data=ctx.res.body,
    +            http=HttpResponseContext(
    +                status=ctx.res.status,
    +                headers=ctx.res.headers,
    +            ),
    +        )
    +    elif ctx.event():
    +        ctx = ctx.event()
    +        return TriggerResponse(
    +            topic=TopicResponseContext(
    +                success=ctx.res.success,
    +            ),
    +        )
    +    else:
    +        raise Exception("Unknown Trigger Context type, unable to return valid response")
    +
    +
    +# ====== HTTP ======
    +
    +
    +class HttpRequest(Request):
    +    """Represents a translated Http Request forwarded from the Nitric Membrane."""
    +
    +    def __init__(self, data: bytes, method: str, path: str, query: Record, headers: Record):
    +        """Construct a new HttpRequest."""
    +        super().__init__(data)
    +        self.method = method
    +        self.path = path
    +        self.query = query
    +        self.headers = headers
    +
    +    @property
    +    def body(self):
    +        """Get the body of the request as text."""
    +        return self.data.decode("utf-8")
    +
    +
    +class HttpResponse(Response):
    +    """Represents an Http Response to be generated by the Nitric Membrane in response to an Http Request Trigger."""
    +
    +    def __init__(self, status: int = 200, headers: Record = None, body: bytes = None):
    +        """Construct a new HttpResponse."""
    +        self.status = status
    +        self.headers = headers if headers else {}
    +        self.body = body if body else bytes()
    +
    +
    +class HttpContext(TriggerContext):
    +    """Represents the full request/response context for an Http based trigger."""
    +
    +    def __init__(self, request: HttpRequest, response: HttpResponse = None):
    +        """Construct a new HttpContext."""
    +        super().__init__()
    +        self.req = request
    +        self.res = response if response else HttpResponse()
    +
    +    def http(self) -> HttpContext:
    +        """Return this HttpContext, used when determining the context type of a trigger."""
    +        return self
    +
    +    @staticmethod
    +    def from_grpc_trigger_request(trigger_request: TriggerRequest) -> HttpContext:
    +        """Construct a new HttpContext from an Http trigger from the Nitric Membrane."""
    +        if len(trigger_request.http.headers.keys()) > 0:
    +            headers = {k: v[0].value for (k, v) in trigger_request.http.headers.items()}
    +        else:
    +            headers = trigger_request.http.headers_old
    +
    +        return HttpContext(
    +            request=HttpRequest(
    +                data=trigger_request.data,
    +                method=trigger_request.http.method,
    +                query=trigger_request.http.query_params,
    +                path=trigger_request.http.path,
    +                headers=headers,
    +            )
    +        )
    +
    +
    +# ====== Events ======
    +
    +
    +class EventRequest(Request):
    +    """Represents a translated Event, from a Subscribed Topic, forwarded from the Nitric Membrane."""
    +
    +    def __init__(self, data: bytes, topic: str):
    +        """Construct a new EventRequest."""
    +        super().__init__(data)
    +        self.topic = topic
    +
    +    @property
    +    def payload(self) -> bytes:
    +        """Return the payload of this request as text."""
    +        return json.loads(self.data.decode("utf-8"))
    +
    +
    +class EventResponse(Response):
    +    """Represents the response to a trigger from an Event as a result of a Topic subscription."""
    +
    +    def __init__(self, success: bool = True):
    +        """Construct a new EventResponse."""
    +        self.success = success
    +
    +
    +class EventContext(TriggerContext):
    +    """Represents the full request/response context for an Event based trigger."""
    +
    +    def __init__(self, request: EventRequest, response: EventResponse = None):
    +        """Construct a new EventContext."""
    +        super().__init__()
    +        self.req = request
    +        self.res = response if response else EventResponse()
    +
    +    def event(self) -> EventContext:
    +        """Return this EventContext, used when determining the context type of a trigger."""
    +        return self
    +
    +    @staticmethod
    +    def from_grpc_trigger_request(trigger_request: TriggerRequest):
    +        """Construct a new EventContext from an Event trigger from the Nitric Membrane."""
    +        return EventContext(request=EventRequest(data=trigger_request.data, topic=trigger_request.topic.topic))
    +
    +
    +# async def face(inpp: int) -> str:
    +#     return "thing"
    +
    +
    +# ====== Function Handlers ======
    +
    +C = TypeVar("C", TriggerContext, HttpContext, EventContext)
    +Middleware = Callable
    +Handler = Coroutine[Any, Any, C]
    +HttpHandler = Coroutine[Any, Any, HttpContext]
    +EventHandler = Coroutine[Any, Any, EventContext]
    +Middleware = Callable[[C, Middleware], Handler]
    +# HttpMiddleware = Middleware[[HttpContext, Middleware], HttpHandler]
    +# EventMiddleware = Middleware[[EventContext, Middleware], EventHandler]
    +
    +
    +def compose_middleware(*middlewares: Union[Middleware, List[Middleware]]) -> Middleware:
    +    """
    +    Compose multiple middleware functions into a single middleware function.
    +
    +    The resulting middleware will effectively be a chain of the provided middleware,
    +    where each calls the next in the chain when they're successful.
    +    """
    +    if len(middlewares) == 1 and not isinstance(middlewares[0], list):
    +        return middlewares[0]
    +
    +    middlewares = [compose_middleware(m) if isinstance(m, list) else m for m in middlewares]
    +
    +    async def handler(ctx, next_middleware=lambda ctx: ctx):
    +        middleware_chain = functools.reduce(
+            lambda acc_next, cur: lambda context: cur(context, acc_next), reversed(middlewares + [next_middleware])
    +        )
    +        return middleware_chain(ctx)
    +
    +    return handler
    +
    +
    +# ====== Function Server ======
    +
    +
    +def _create_internal_error_response(req: TriggerRequest) -> TriggerResponse:
    +    """Create a general error response based on the trigger request type."""
    +    context_type, context = betterproto.which_one_of(req, "context")
    +    if context_type == "http":
    +        return TriggerResponse(data=bytes(), http=HttpResponseContext(status=500))
    +    elif context_type == "topic":
    +        return TriggerResponse(data=bytes(), topic=TopicResponseContext(success=False))
    +    else:
    +        raise Exception(f"Unknown trigger type: {context_type}, unable to generate expected response")
    +
    +
    +class FunctionServer:
    +    """A Function as a Service server, which acts as a faas handler for the Nitric Membrane."""
    +
    +    def __init__(self):
    +        """Construct a new function server."""
    +        self.__http_handler = None
    +        self.__event_handler = None
    +        self._any_handler = None
    +
    +    def http(self, *handlers: Union[Middleware, List[Middleware]]) -> FunctionServer:
    +        """
    +        Register one or more HTTP Trigger Handlers or Middleware.
    +
    +        When multiple handlers are provided, they will be called in order.
    +        """
    +        self.__http_handler = compose_middleware(*handlers)
    +        return self
    +
    +    def event(self, *handlers: Union[Middleware, List[Middleware]]) -> FunctionServer:
    +        """
    +        Register one or more Event Trigger Handlers or Middleware.
    +
    +        When multiple handlers are provided, they will be called in order.
    +        """
    +        self.__event_handler = compose_middleware(*handlers)
    +        return self
    +
    +    def start(self, *handlers: Union[Middleware, List[Middleware]]):
    +        """Start the function server using the provided trigger handlers."""
    +        self._any_handler = compose_middleware(*handlers) if len(handlers) > 0 else None
    +        # TODO: implement the server
    +        if not self._any_handler and not self._http_handler and not self._event_handler:
    +            raise Exception("At least one handler function must be provided.")
    +
    +        asyncio.run(self.run())
    +
    +    @property
    +    def _http_handler(self):
    +        return self.__http_handler if self.__http_handler else self._any_handler
    +
    +    @property
    +    def _event_handler(self):
    +        return self.__event_handler if self.__event_handler else self._any_handler
    +
    +    async def run(self):
    +        """Register a new FaaS worker with the Membrane, using the provided function as the handler."""
    +        channel = new_default_channel()
    +        client = FaasServiceStub(channel)
    +        request_channel = AsyncChannel(close=True)
+        # We can start by sending all the requests we already have
    +        try:
    +            await request_channel.send(ClientMessage(init_request=InitRequest()))
    +            async for srv_msg in client.trigger_stream(request_channel):
    +                # The response iterator will remain active until the connection is closed
    +                msg_type, val = betterproto.which_one_of(srv_msg, "content")
    +
    +                if msg_type == "init_response":
    +                    print("function connected to Membrane")
    +                    # We don't need to reply
    +                    # proceed to the next available message
    +                    continue
    +                if msg_type == "trigger_request":
    +                    ctx = _ctx_from_grpc_trigger_request(srv_msg.trigger_request)
    +
    +                    try:
    +                        if ctx.http():
    +                            func = self._http_handler
    +                        elif ctx.event():
    +                            func = self._event_handler
    +                        else:
    +                            func = self._any_handler
    +                        response_ctx = (await func(ctx)) if asyncio.iscoroutinefunction(func) else func(ctx)
    +                        # Send function response back to server
    +                        await request_channel.send(
    +                            ClientMessage(
    +                                id=srv_msg.id,
    +                                trigger_response=_grpc_response_from_ctx(response_ctx),
    +                            )
    +                        )
    +                    except Exception:
    +                        # Any unhandled exceptions in the above code will end the loop
    +                        # and stop processing future triggers, we catch them here as a last resort.
    +                        print("An unexpected error occurred processing trigger or response")
    +                        traceback.print_exc()
    +                        response = _create_internal_error_response(srv_msg.trigger_request)
    +                        await request_channel.send(ClientMessage(id=srv_msg.id, trigger_response=response))
    +                else:
    +                    print(f"unhandled message type {msg_type}, skipping")
    +                    continue
    +                if request_channel.done():
    +                    break
    +        except ConnectionRefusedError as cre:
    +            traceback.print_exc()
    +            raise ConnectionRefusedError("Failed to register function with Membrane") from cre
    +        except Exception as e:
    +            traceback.print_exc()
    +            raise Exception("An unexpected error occurred.") from e
    +        finally:
    +            print("stream from Membrane closed, closing client stream")
    +            # The channel must be closed to complete the gRPC connection
    +            request_channel.close()
    +            channel.close()
    +
    +
    +# Convenience functions to create function servers
    +
    +
    +def http(*handlers: Union[Middleware, List[Middleware]]) -> FunctionServer:
    +    """
    +    Create a new Function Server and Register one or more HTTP Trigger Handlers or Middleware.
    +
    +    When multiple handlers are provided, they will be called in order.
    +    """
    +    return FunctionServer().http(*handlers)
    +
    +
    +def event(*handlers: Union[Middleware, List[Middleware]]) -> FunctionServer:
    +    """
    +    Create a new Function Server and Register one or more Event Trigger Handlers or Middleware.
    +
    +    When multiple handlers are provided, they will be called in order.
    +    """
    +    return FunctionServer().event(*handlers)
    +
    +
    +def start(*handlers: Union[Middleware, List[Middleware]]):
    +    """Create a new Function Server and start it using the provided trigger handlers."""
    +    if len(handlers) < 1:
    +        raise Exception("At least one handler must be provided.")
    +    return FunctionServer().start(*handlers)
    +
    +
    +
    +
    +
    +
    +
    +

    Functions

    +
    +
    +def compose_middleware(*middlewares: Union[Middleware, List[Middleware]]) ‑> Callable[[~C, Callable], Coroutine[Any, Any, ~C]] +
    +
    +

    Compose multiple middleware functions into a single middleware function.

    +

    The resulting middleware will effectively be a chain of the provided middleware, +where each calls the next in the chain when they're successful.

    +
    + +Expand source code + +
    def compose_middleware(*middlewares: Union[Middleware, List[Middleware]]) -> Middleware:
    +    """
    +    Compose multiple middleware functions into a single middleware function.
    +
    +    The resulting middleware will effectively be a chain of the provided middleware,
    +    where each calls the next in the chain when they're successful.
    +    """
    +    if len(middlewares) == 1 and not isinstance(middlewares[0], list):
    +        return middlewares[0]
    +
    +    middlewares = [compose_middleware(m) if isinstance(m, list) else m for m in middlewares]
    +
    +    async def handler(ctx, next_middleware=lambda ctx: ctx):
+        middleware_chain = functools.reduce(
+            lambda acc_next, cur: lambda context: cur(context, acc_next),
+            reversed(middlewares + [next_middleware]),
+        )
+        result = middleware_chain(ctx)
+        # Middleware may be sync or async, so await the composed chain only when it returns a coroutine.
+        return await result if asyncio.iscoroutine(result) else result
    +
    +    return handler
    +
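As a rough usage sketch, not taken from the SDK source: two async middleware composed into a single handler. The (ctx, nxt) calling convention is inferred from the source above, and the import path nitric.faas is assumed.

from nitric.faas import compose_middleware, HttpContext  # assumed import path

async def add_header(ctx: HttpContext, nxt):
    # Hypothetical header name, purely for illustration.
    ctx.res.headers["x-example"] = "demo"
    return await nxt(ctx)  # nxt wraps the next (async) middleware in the chain

async def respond(ctx: HttpContext, nxt):
    ctx.res.body = b"hello from composed middleware"
    return ctx  # end of the chain, so nxt is not called

handler = compose_middleware(add_header, respond)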
    +
    +
    +def event(*handlers: Union[Middleware, List[Middleware]]) ‑> FunctionServer +
    +
    +

Create a new Function Server and register one or more Event Trigger Handlers or Middleware.

    +

    When multiple handlers are provided, they will be called in order.

    +
    + +Expand source code + +
    def event(*handlers: Union[Middleware, List[Middleware]]) -> FunctionServer:
    +    """
+    Create a new Function Server and register one or more Event Trigger Handlers or Middleware.
    +
    +    When multiple handlers are provided, they will be called in order.
    +    """
    +    return FunctionServer().event(*handlers)
    +
    +
    +
    +def http(*handlers: Union[Middleware, List[Middleware]]) ‑> FunctionServer +
    +
    +

Create a new Function Server and register one or more HTTP Trigger Handlers or Middleware.

    +

    When multiple handlers are provided, they will be called in order.

    +
    + +Expand source code + +
    def http(*handlers: Union[Middleware, List[Middleware]]) -> FunctionServer:
    +    """
+    Create a new Function Server and register one or more HTTP Trigger Handlers or Middleware.
    +
    +    When multiple handlers are provided, they will be called in order.
    +    """
    +    return FunctionServer().http(*handlers)
    +
    +
    +
    +def start(*handlers: Union[Middleware, List[Middleware]]) +
    +
    +

    Create a new Function Server and start it using the provided trigger handlers.

    +
    + +Expand source code + +
    def start(*handlers: Union[Middleware, List[Middleware]]):
    +    """Create a new Function Server and start it using the provided trigger handlers."""
    +    if len(handlers) < 1:
    +        raise Exception("At least one handler must be provided.")
    +    return FunctionServer().start(*handlers)
    +
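A minimal wiring sketch, assuming start and HttpContext are exported from nitric.faas. A single handler only needs the context argument and should return the context so its response is sent back to the Membrane.

from nitric.faas import start, HttpContext  # assumed import path

async def handle(ctx: HttpContext):
    ctx.res.body = b"Hello World"
    return ctx  # the returned context carries the response

start(handle)  # registers the worker with the Membrane and blocks until the stream closes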
    +
    +
    +
    +
    +

    Classes

    +
    +
    +class EventContext +(request: EventRequest, response: EventResponse = None) +
    +
    +

    Represents the full request/response context for an Event based trigger.

    +

    Construct a new EventContext.

    +
    + +Expand source code + +
    class EventContext(TriggerContext):
    +    """Represents the full request/response context for an Event based trigger."""
    +
    +    def __init__(self, request: EventRequest, response: EventResponse = None):
    +        """Construct a new EventContext."""
    +        super().__init__()
    +        self.req = request
    +        self.res = response if response else EventResponse()
    +
    +    def event(self) -> EventContext:
    +        """Return this EventContext, used when determining the context type of a trigger."""
    +        return self
    +
    +    @staticmethod
    +    def from_grpc_trigger_request(trigger_request: TriggerRequest):
    +        """Construct a new EventContext from an Event trigger from the Nitric Membrane."""
    +        return EventContext(request=EventRequest(data=trigger_request.data, topic=trigger_request.topic.topic))
    +
    +

    Ancestors

    + +

    Static methods

    +
    +
    +def from_grpc_trigger_request(trigger_request: TriggerRequest) +
    +
    +

    Construct a new EventContext from an Event trigger from the Nitric Membrane.

    +
    + +Expand source code + +
    @staticmethod
    +def from_grpc_trigger_request(trigger_request: TriggerRequest):
    +    """Construct a new EventContext from an Event trigger from the Nitric Membrane."""
    +    return EventContext(request=EventRequest(data=trigger_request.data, topic=trigger_request.topic.topic))
    +
    +
    +
    +

    Methods

    +
    +
    +def event(self) ‑> EventContext +
    +
    +

    Return this EventContext, used when determining the context type of a trigger.

    +
    + +Expand source code + +
    def event(self) -> EventContext:
    +    """Return this EventContext, used when determining the context type of a trigger."""
    +    return self
    +
    +
    +
    +

    Inherited members

    + +
    +
    +class EventRequest +(data: bytes, topic: str) +
    +
    +

    Represents a translated Event, from a Subscribed Topic, forwarded from the Nitric Membrane.

    +

    Construct a new EventRequest.

    +
    + +Expand source code + +
    class EventRequest(Request):
    +    """Represents a translated Event, from a Subscribed Topic, forwarded from the Nitric Membrane."""
    +
    +    def __init__(self, data: bytes, topic: str):
    +        """Construct a new EventRequest."""
    +        super().__init__(data)
    +        self.topic = topic
    +
    +    @property
    +    def payload(self) -> bytes:
    +        """Return the payload of this request as text."""
    +        return json.loads(self.data.decode("utf-8"))
    +
    +

    Ancestors

    + +

    Instance variables

    +
    +
    var payload : bytes
    +
    +

Return the payload of this request, decoded from JSON.

    +
    + +Expand source code + +
    @property
    +def payload(self) -> bytes:
    +    """Return the payload of this request as text."""
    +    return json.loads(self.data.decode("utf-8"))
    +
    +
    +
    +
    +
    +class EventResponse +(success: bool = True) +
    +
    +

    Represents the response to a trigger from an Event as a result of a Topic subscription.

    +

    Construct a new EventResponse.

    +
    + +Expand source code + +
    class EventResponse(Response):
    +    """Represents the response to a trigger from an Event as a result of a Topic subscription."""
    +
    +    def __init__(self, success: bool = True):
    +        """Construct a new EventResponse."""
    +        self.success = success
    +
    +

    Ancestors

    + +
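Putting the event types together, a hedged sketch of a topic handler. EventRequest.payload decodes the event data from JSON, so it is typically a dict or list.

async def audit(ctx):
    record = ctx.req.payload               # event data decoded from JSON
    print(f"event on topic {ctx.req.topic}: {record}")
    ctx.res.success = True                 # report successful processing
    return ctx

# Registration, assuming the module-level convenience function:
# event(audit).start()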
    +
    +class FunctionServer +
    +
    +

A Function as a Service server, which acts as a FaaS handler for the Nitric Membrane.

    +

    Construct a new function server.

    +
    + +Expand source code + +
    class FunctionServer:
    +    """A Function as a Service server, which acts as a faas handler for the Nitric Membrane."""
    +
    +    def __init__(self):
    +        """Construct a new function server."""
    +        self.__http_handler = None
    +        self.__event_handler = None
    +        self._any_handler = None
    +
    +    def http(self, *handlers: Union[Middleware, List[Middleware]]) -> FunctionServer:
    +        """
    +        Register one or more HTTP Trigger Handlers or Middleware.
    +
    +        When multiple handlers are provided, they will be called in order.
    +        """
    +        self.__http_handler = compose_middleware(*handlers)
    +        return self
    +
    +    def event(self, *handlers: Union[Middleware, List[Middleware]]) -> FunctionServer:
    +        """
    +        Register one or more Event Trigger Handlers or Middleware.
    +
    +        When multiple handlers are provided, they will be called in order.
    +        """
    +        self.__event_handler = compose_middleware(*handlers)
    +        return self
    +
    +    def start(self, *handlers: Union[Middleware, List[Middleware]]):
    +        """Start the function server using the provided trigger handlers."""
    +        self._any_handler = compose_middleware(*handlers) if len(handlers) > 0 else None
    +        # TODO: implement the server
    +        if not self._any_handler and not self._http_handler and not self._event_handler:
    +            raise Exception("At least one handler function must be provided.")
    +
    +        asyncio.run(self.run())
    +
    +    @property
    +    def _http_handler(self):
    +        return self.__http_handler if self.__http_handler else self._any_handler
    +
    +    @property
    +    def _event_handler(self):
    +        return self.__event_handler if self.__event_handler else self._any_handler
    +
    +    async def run(self):
    +        """Register a new FaaS worker with the Membrane, using the provided function as the handler."""
    +        channel = new_default_channel()
    +        client = FaasServiceStub(channel)
    +        request_channel = AsyncChannel(close=True)
+        # We can start by sending all the requests we already have
    +        try:
    +            await request_channel.send(ClientMessage(init_request=InitRequest()))
    +            async for srv_msg in client.trigger_stream(request_channel):
    +                # The response iterator will remain active until the connection is closed
    +                msg_type, val = betterproto.which_one_of(srv_msg, "content")
    +
    +                if msg_type == "init_response":
    +                    print("function connected to Membrane")
    +                    # We don't need to reply
    +                    # proceed to the next available message
    +                    continue
    +                if msg_type == "trigger_request":
    +                    ctx = _ctx_from_grpc_trigger_request(srv_msg.trigger_request)
    +
    +                    try:
    +                        if ctx.http():
    +                            func = self._http_handler
    +                        elif ctx.event():
    +                            func = self._event_handler
    +                        else:
    +                            func = self._any_handler
    +                        response_ctx = (await func(ctx)) if asyncio.iscoroutinefunction(func) else func(ctx)
    +                        # Send function response back to server
    +                        await request_channel.send(
    +                            ClientMessage(
    +                                id=srv_msg.id,
    +                                trigger_response=_grpc_response_from_ctx(response_ctx),
    +                            )
    +                        )
    +                    except Exception:
    +                        # Any unhandled exceptions in the above code will end the loop
    +                        # and stop processing future triggers, we catch them here as a last resort.
    +                        print("An unexpected error occurred processing trigger or response")
    +                        traceback.print_exc()
    +                        response = _create_internal_error_response(srv_msg.trigger_request)
    +                        await request_channel.send(ClientMessage(id=srv_msg.id, trigger_response=response))
    +                else:
    +                    print(f"unhandled message type {msg_type}, skipping")
    +                    continue
    +                if request_channel.done():
    +                    break
    +        except ConnectionRefusedError as cre:
    +            traceback.print_exc()
    +            raise ConnectionRefusedError("Failed to register function with Membrane") from cre
    +        except Exception as e:
    +            traceback.print_exc()
    +            raise Exception("An unexpected error occurred.") from e
    +        finally:
    +            print("stream from Membrane closed, closing client stream")
    +            # The channel must be closed to complete the gRPC connection
    +            request_channel.close()
    +            channel.close()
    +
    +

    Methods

    +
    +
    +def event(self, *handlers: Union[Middleware, List[Middleware]]) ‑> FunctionServer +
    +
    +

    Register one or more Event Trigger Handlers or Middleware.

    +

    When multiple handlers are provided, they will be called in order.

    +
    + +Expand source code + +
    def event(self, *handlers: Union[Middleware, List[Middleware]]) -> FunctionServer:
    +    """
    +    Register one or more Event Trigger Handlers or Middleware.
    +
    +    When multiple handlers are provided, they will be called in order.
    +    """
    +    self.__event_handler = compose_middleware(*handlers)
    +    return self
    +
    +
    +
    +def http(self, *handlers: Union[Middleware, List[Middleware]]) ‑> FunctionServer +
    +
    +

    Register one or more HTTP Trigger Handlers or Middleware.

    +

    When multiple handlers are provided, they will be called in order.

    +
    + +Expand source code + +
    def http(self, *handlers: Union[Middleware, List[Middleware]]) -> FunctionServer:
    +    """
    +    Register one or more HTTP Trigger Handlers or Middleware.
    +
    +    When multiple handlers are provided, they will be called in order.
    +    """
    +    self.__http_handler = compose_middleware(*handlers)
    +    return self
    +
    +
    +
    +async def run(self) +
    +
    +

Register a new FaaS worker with the Membrane and handle incoming triggers with the registered handlers.

    +
    + +Expand source code + +
    async def run(self):
    +    """Register a new FaaS worker with the Membrane, using the provided function as the handler."""
    +    channel = new_default_channel()
    +    client = FaasServiceStub(channel)
    +    request_channel = AsyncChannel(close=True)
+    # We can start by sending all the requests we already have
    +    try:
    +        await request_channel.send(ClientMessage(init_request=InitRequest()))
    +        async for srv_msg in client.trigger_stream(request_channel):
    +            # The response iterator will remain active until the connection is closed
    +            msg_type, val = betterproto.which_one_of(srv_msg, "content")
    +
    +            if msg_type == "init_response":
    +                print("function connected to Membrane")
    +                # We don't need to reply
    +                # proceed to the next available message
    +                continue
    +            if msg_type == "trigger_request":
    +                ctx = _ctx_from_grpc_trigger_request(srv_msg.trigger_request)
    +
    +                try:
    +                    if ctx.http():
    +                        func = self._http_handler
    +                    elif ctx.event():
    +                        func = self._event_handler
    +                    else:
    +                        func = self._any_handler
    +                    response_ctx = (await func(ctx)) if asyncio.iscoroutinefunction(func) else func(ctx)
    +                    # Send function response back to server
    +                    await request_channel.send(
    +                        ClientMessage(
    +                            id=srv_msg.id,
    +                            trigger_response=_grpc_response_from_ctx(response_ctx),
    +                        )
    +                    )
    +                except Exception:
    +                    # Any unhandled exceptions in the above code will end the loop
    +                    # and stop processing future triggers, we catch them here as a last resort.
    +                    print("An unexpected error occurred processing trigger or response")
    +                    traceback.print_exc()
    +                    response = _create_internal_error_response(srv_msg.trigger_request)
    +                    await request_channel.send(ClientMessage(id=srv_msg.id, trigger_response=response))
    +            else:
    +                print(f"unhandled message type {msg_type}, skipping")
    +                continue
    +            if request_channel.done():
    +                break
    +    except ConnectionRefusedError as cre:
    +        traceback.print_exc()
    +        raise ConnectionRefusedError("Failed to register function with Membrane") from cre
    +    except Exception as e:
    +        traceback.print_exc()
    +        raise Exception("An unexpected error occurred.") from e
    +    finally:
    +        print("stream from Membrane closed, closing client stream")
    +        # The channel must be closed to complete the gRPC connection
    +        request_channel.close()
    +        channel.close()
    +
    +
    +
    +def start(self, *handlers: Union[Middleware, List[Middleware]]) +
    +
    +

    Start the function server using the provided trigger handlers.

    +
    + +Expand source code + +
    def start(self, *handlers: Union[Middleware, List[Middleware]]):
    +    """Start the function server using the provided trigger handlers."""
    +    self._any_handler = compose_middleware(*handlers) if len(handlers) > 0 else None
    +    # TODO: implement the server
    +    if not self._any_handler and not self._http_handler and not self._event_handler:
    +        raise Exception("At least one handler function must be provided.")
    +
    +    asyncio.run(self.run())
    +
    +
    +
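A sketch of the fluent registration style, assuming FunctionServer, HttpContext and EventContext are importable from nitric.faas. Because http() and event() return the server, the calls chain, and start() blocks on run().

from nitric.faas import FunctionServer, HttpContext, EventContext  # assumed import path

async def handle_http(ctx: HttpContext):
    ctx.res.status = 200
    ctx.res.body = b"ok"
    return ctx

async def handle_event(ctx: EventContext):
    ctx.res.success = True
    return ctx

FunctionServer().http(handle_http).event(handle_event).start()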
    +
    +
    +class HttpContext +(request: HttpRequest, response: HttpResponse = None) +
    +
    +

    Represents the full request/response context for an Http based trigger.

    +

    Construct a new HttpContext.

    +
    + +Expand source code + +
    class HttpContext(TriggerContext):
    +    """Represents the full request/response context for an Http based trigger."""
    +
    +    def __init__(self, request: HttpRequest, response: HttpResponse = None):
    +        """Construct a new HttpContext."""
    +        super().__init__()
    +        self.req = request
    +        self.res = response if response else HttpResponse()
    +
    +    def http(self) -> HttpContext:
    +        """Return this HttpContext, used when determining the context type of a trigger."""
    +        return self
    +
    +    @staticmethod
    +    def from_grpc_trigger_request(trigger_request: TriggerRequest) -> HttpContext:
    +        """Construct a new HttpContext from an Http trigger from the Nitric Membrane."""
    +        if len(trigger_request.http.headers.keys()) > 0:
    +            headers = {k: v[0].value for (k, v) in trigger_request.http.headers.items()}
    +        else:
    +            headers = trigger_request.http.headers_old
    +
    +        return HttpContext(
    +            request=HttpRequest(
    +                data=trigger_request.data,
    +                method=trigger_request.http.method,
    +                query=trigger_request.http.query_params,
    +                path=trigger_request.http.path,
    +                headers=headers,
    +            )
    +        )
    +
    +

    Ancestors

    + +

    Static methods

    +
    +
    +def from_grpc_trigger_request(trigger_request: TriggerRequest) ‑> HttpContext +
    +
    +

    Construct a new HttpContext from an Http trigger from the Nitric Membrane.

    +
    + +Expand source code + +
    @staticmethod
    +def from_grpc_trigger_request(trigger_request: TriggerRequest) -> HttpContext:
    +    """Construct a new HttpContext from an Http trigger from the Nitric Membrane."""
    +    if len(trigger_request.http.headers.keys()) > 0:
    +        headers = {k: v[0].value for (k, v) in trigger_request.http.headers.items()}
    +    else:
    +        headers = trigger_request.http.headers_old
    +
    +    return HttpContext(
    +        request=HttpRequest(
    +            data=trigger_request.data,
    +            method=trigger_request.http.method,
    +            query=trigger_request.http.query_params,
    +            path=trigger_request.http.path,
    +            headers=headers,
    +        )
    +    )
    +
    +
    +
    +

    Methods

    +
    +
    +def http(self) ‑> HttpContext +
    +
    +

    Return this HttpContext, used when determining the context type of a trigger.

    +
    + +Expand source code + +
    def http(self) -> HttpContext:
    +    """Return this HttpContext, used when determining the context type of a trigger."""
    +    return self
    +
    +
    +
    +

    Inherited members

    + +
    +
    +class HttpRequest +(data: bytes, method: str, path: str, query: Record, headers: Record) +
    +
    +

    Represents a translated Http Request forwarded from the Nitric Membrane.

    +

    Construct a new HttpRequest.

    +
    + +Expand source code + +
    class HttpRequest(Request):
    +    """Represents a translated Http Request forwarded from the Nitric Membrane."""
    +
    +    def __init__(self, data: bytes, method: str, path: str, query: Record, headers: Record):
    +        """Construct a new HttpRequest."""
    +        super().__init__(data)
    +        self.method = method
    +        self.path = path
    +        self.query = query
    +        self.headers = headers
    +
    +    @property
    +    def body(self):
    +        """Get the body of the request as text."""
    +        return self.data.decode("utf-8")
    +
    +

    Ancestors

    + +

    Instance variables

    +
    +
    var body
    +
    +

    Get the body of the request as text.

    +
    + +Expand source code + +
    @property
    +def body(self):
    +    """Get the body of the request as text."""
    +    return self.data.decode("utf-8")
    +
    +
    +
    +
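An illustrative handler that echoes the request metadata documented above; query and headers are assumed to be dict-like mappings.

import json

async def echo(ctx):
    ctx.res.headers["Content-Type"] = "application/json"
    ctx.res.body = json.dumps(
        {
            "method": ctx.req.method,
            "path": ctx.req.path,
            "query": dict(ctx.req.query),
            "body": ctx.req.body,  # request body decoded as text
        }
    ).encode("utf-8")
    return ctx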
    +
    +class HttpResponse +(status: int = 200, headers: Record = None, body: bytes = None) +
    +
    +

    Represents an Http Response to be generated by the Nitric Membrane in response to an Http Request Trigger.

    +

    Construct a new HttpResponse.

    +
    + +Expand source code + +
    class HttpResponse(Response):
    +    """Represents an Http Response to be generated by the Nitric Membrane in response to an Http Request Trigger."""
    +
    +    def __init__(self, status: int = 200, headers: Record = None, body: bytes = None):
    +        """Construct a new HttpResponse."""
    +        self.status = status
    +        self.headers = headers if headers else {}
    +        self.body = body if body else bytes()
    +
    +

    Ancestors

    + +
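The response can also be replaced outright; a small sketch returning a 404, with an assumed import path and illustrative header values.

from nitric.faas import HttpResponse  # assumed import path

async def not_found(ctx):
    ctx.res = HttpResponse(status=404, headers={"Content-Type": "text/plain"}, body=b"not found")
    return ctx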
    +
    +class Request +(data: bytes) +
    +
    +

    Represents an abstract trigger request.

    +

    Construct a new Request.

    +
    + +Expand source code + +
    class Request(ABC):
    +    """Represents an abstract trigger request."""
    +
    +    def __init__(self, data: bytes):
    +        """Construct a new Request."""
    +        self.data = data
    +
    +

    Ancestors

    +
      +
    • abc.ABC
    • +
    +

    Subclasses

    + +
    +
    +class Response +
    +
    +

    Represents an abstract trigger response.

    +
    + +Expand source code + +
    class Response(ABC):
    +    """Represents an abstract trigger response."""
    +
    +    pass
    +
    +

    Ancestors

    +
      +
    • abc.ABC
    • +
    +

    Subclasses

    + +
    +
    +class TriggerContext +
    +
    +

    Represents an abstract request/response context for any trigger.

    +
    + +Expand source code + +
    class TriggerContext(ABC):
    +    """Represents an abstract request/response context for any trigger."""
    +
    +    def http(self) -> Union[HttpContext, None]:
    +        """Return this context as an HttpContext if it is one, otherwise returns None."""
    +        return None
    +
    +    def event(self) -> Union[EventContext, None]:
    +        """Return this context as an EventContext if it is one, otherwise returns None."""
    +        return None
    +
    +

    Ancestors

    +
      +
    • abc.ABC
    • +
    +

    Subclasses

    + +

    Methods

    +
    +
    +def event(self) ‑> Union[EventContext, NoneType] +
    +
    +

    Return this context as an EventContext if it is one, otherwise returns None.

    +
    + +Expand source code + +
    def event(self) -> Union[EventContext, None]:
    +    """Return this context as an EventContext if it is one, otherwise returns None."""
    +    return None
    +
    +
    +
    +def http(self) ‑> Union[HttpContext, NoneType] +
    +
    +

    Return this context as an HttpContext if it is one, otherwise returns None.

    +
    + +Expand source code + +
    def http(self) -> Union[HttpContext, None]:
    +    """Return this context as an HttpContext if it is one, otherwise returns None."""
    +    return None
    +
    +
    +
    +
    +
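A sketch of a trigger-agnostic handler built on these type checks, mirroring how FunctionServer.run() dispatches triggers.

async def universal(ctx):
    if ctx.http():
        ctx.res.body = b"handled an HTTP trigger"
    elif ctx.event():
        ctx.res.success = True   # handled an event trigger
    return ctx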
    +
    +
    + +
    + + + \ No newline at end of file diff --git a/docs/nitric/faas/faas.html b/docs/nitric/faas/faas.html deleted file mode 100644 index 09e23be..0000000 --- a/docs/nitric/faas/faas.html +++ /dev/null @@ -1,348 +0,0 @@ - - - - - - -nitric.faas.faas API documentation - - - - - - - - - - - -
    -
    -
    -

    Module nitric.faas.faas

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -import traceback
    -from typing import Callable, Union, Coroutine, Any
    -
    -import betterproto
    -from betterproto.grpc.util.async_channel import AsyncChannel
    -
    -from nitric.utils import new_default_channel
    -from nitric.faas import Trigger, Response
    -from nitric.proto.nitric.faas.v1 import FaasStub, InitRequest, ClientMessage
    -import asyncio
    -
    -
    -async def _register_faas_worker(
    -    func: Callable[[Trigger], Union[Coroutine[Any, Any, Union[Response, None, dict]], Union[Response, None, dict]]]
    -):
    -    """
    -    Register a new FaaS worker with the Membrane, using the provided function as the handler.
    -
    -    :param func: handler function for incoming triggers. Can be sync or async, async is preferred.
    -    """
    -    channel = new_default_channel()
    -    client = FaasStub(channel)
    -    request_channel = AsyncChannel(close=True)
    -    # We can start be sending all the requests we already have
    -    try:
    -        await request_channel.send(ClientMessage(init_request=InitRequest()))
    -        async for srv_msg in client.trigger_stream(request_channel):
    -            # The response iterator will remain active until the connection is closed
    -            msg_type, val = betterproto.which_one_of(srv_msg, "content")
    -
    -            if msg_type == "init_response":
    -                print("function connected to Membrane")
    -                # We don't need to reply
    -                # proceed to the next available message
    -                continue
    -            if msg_type == "trigger_request":
    -                trigger = Trigger.from_trigger_request(srv_msg.trigger_request)
    -                try:
    -                    response = await func(trigger) if asyncio.iscoroutinefunction(func) else func(trigger)
    -                except Exception:
    -                    print("Error calling handler function")
    -                    traceback.print_exc()
    -                    response = trigger.default_response()
    -                    if response.context.is_http():
    -                        response.context.as_http().status = 500
    -                    else:
    -                        response.context.as_topic().success = False
    -
    -                # Handle lite responses with just data, assume a success in these cases
    -                if not isinstance(response, Response):
    -                    full_response = trigger.default_response()
    -                    full_response.data = bytes(str(response), "utf-8")
    -                    response = full_response
    -
    -                # Send function response back to server
    -                await request_channel.send(
    -                    ClientMessage(id=srv_msg.id, trigger_response=response.to_grpc_trigger_response_context())
    -                )
    -            else:
    -                print("unhandled message type {0}, skipping".format(msg_type))
    -                continue
    -            if request_channel.done():
    -                break
    -    except Exception:
    -        traceback.print_exc()
    -    finally:
    -        print("stream from Membrane closed, closing client stream")
    -        # The channel must be closed to complete the gRPC connection
    -        request_channel.close()
    -        channel.close()
    -
    -
    -def start(handler: Callable[[Trigger], Coroutine[Any, Any, Union[Response, None, dict]]]):
    -    """
    -    Register the provided function as the trigger handler and starts handling new trigger requests.
    -
    -    :param handler: handler function for incoming triggers. Can be sync or async, async is preferred.
    -    """
    -    asyncio.run(_register_faas_worker(handler))
    -
    -
    -
    -
    -
    -
    -
    -

    Functions

    -
    -
    -def start(handler: Callable[[Trigger], Coroutine[Any, Any, Union[Response, NoneType, dict]]]) -
    -
    -

    Register the provided function as the trigger handler and starts handling new trigger requests.

    -

    :param handler: handler function for incoming triggers. Can be sync or async, async is preferred.

    -
    - -Expand source code - -
    def start(handler: Callable[[Trigger], Coroutine[Any, Any, Union[Response, None, dict]]]):
    -    """
    -    Register the provided function as the trigger handler and starts handling new trigger requests.
    -
    -    :param handler: handler function for incoming triggers. Can be sync or async, async is preferred.
    -    """
    -    asyncio.run(_register_faas_worker(handler))
    -
    -
    -
    -
    -
    -<<<<<<< refs/remotes/origin/main -

    Classes

    -
    -
    -class Handler -(func: Callable[[Trigger], Union[Response, str]]) -
    -
    -

    Nitric Function handler.

    -

    Construct a new handler using the provided function to handle new requests.

    -
    - -Expand source code - -
    class Handler(object):
    -    """Nitric Function handler."""
    -
    -    def __init__(self, func: Callable[[Trigger], Union[Response, str]]):
    -        """Construct a new handler using the provided function to handle new requests."""
    -        self.func = func
    -
    -    def __call__(self, path="", *args):
    -        """Construct Nitric Request from HTTP Request."""
    -        trigger_request = construct_request()
    -
    -        grpc_trigger_response: TriggerResponse
    -
    -        # convert it to a trigger
    -        trigger = Trigger.from_trigger_request(trigger_request)
    -
    -        try:
    -            # Execute the handler function
    -            response: Union[Response, str] = self.func(trigger)
    -
    -            final_response: Response
    -            if isinstance(response, str):
    -                final_response = trigger.default_response()
    -                final_response.data = response.encode()
    -            elif isinstance(response, Response):
    -                final_response = response
    -            else:
    -                # assume None
    -                final_response = trigger.default_response()
    -                final_response.data = "".encode()
    -
    -            grpc_trigger_response = final_response.to_grpc_trigger_response_context()
    -
    -        except Exception:
    -            trigger_response = trigger.default_response()
    -            if trigger_response.context.is_http():
    -                trigger_response.context.as_http().status = 500
    -                trigger_response.context.as_http().headers = {"Content-Type": "text/html"}
    -                trigger_response.data = exception_to_html().encode()
    -            elif trigger_response.context.is_topic():
    -                trigger_response.data = "Error processing message"
    -                trigger_response.context.as_topic().success = False
    -
    -            grpc_trigger_response = trigger_response.to_grpc_trigger_response_context()
    -
    -        return http_response(grpc_trigger_response)
    -
    -
    -
    -class TriggerRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var data
    -
    -

    Field nitric.faas.v1.TriggerRequest.data

    -
    -
    var http
    -
    -

    Field nitric.faas.v1.TriggerRequest.http

    -
    -
    var mime_type
    -
    -

    Field nitric.faas.v1.TriggerRequest.mime_type

    -
    -
    var topic
    -
    -

    Field nitric.faas.v1.TriggerRequest.topic

    -
    -
    -
    -
    -class TriggerResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var data
    -
    -

    Field nitric.faas.v1.TriggerResponse.data

    -
    -
    var http
    -
    -

    Field nitric.faas.v1.TriggerResponse.http

    -
    -
    var topic
    -
    -

    Field nitric.faas.v1.TriggerResponse.topic

    -
    -
    -
    -
    -======= ->>>>>>> feat: port faas.start to bi-di streaming with membrane -
    -
    - -
    - - - \ No newline at end of file diff --git a/docs/nitric/faas/index.html b/docs/nitric/faas/index.html deleted file mode 100644 index e74007e..0000000 --- a/docs/nitric/faas/index.html +++ /dev/null @@ -1,874 +0,0 @@ - - - - - - -nitric.faas API documentation - - - - - - - - - - - -
    -
    -
    -

    Module nitric.faas

    -
    -
    -

    Nitric Function as a Service (FaaS) Package.

    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -"""Nitric Function as a Service (FaaS) Package."""
    -from nitric.faas.trigger import Trigger, TriggerContext
    -from nitric.faas.response import Response, ResponseContext, HttpResponseContext, TopicResponseContext
    -from nitric.faas.faas import start
    -
    -__all__ = [
    -    "Trigger",
    -    "Response",
    -    "ResponseContext",
    -    "HttpResponseContext",
    -    "TopicResponseContext",
    -    "TriggerContext",
    -    "start",
    -]
    -
    -
    -
    -

    Sub-modules

    -
    -
    nitric.faas.faas
    -
    -
    -
    -
    nitric.faas.response
    -
    -
    -
    -
    nitric.faas.trigger
    -
    -
    -
    -
    -
    -
    -
    -
    -

    Functions

    -
    -
    -def start(handler: Callable[[Trigger], Coroutine[Any, Any, Union[Response, NoneType, dict]]]) -
    -
    -

    Register the provided function as the trigger handler and starts handling new trigger requests.

    -

    :param handler: handler function for incoming triggers. Can be sync or async, async is preferred.

    -
    - -Expand source code - -
    def start(handler: Callable[[Trigger], Coroutine[Any, Any, Union[Response, None, dict]]]):
    -    """
    -    Register the provided function as the trigger handler and starts handling new trigger requests.
    -
    -    :param handler: handler function for incoming triggers. Can be sync or async, async is preferred.
    -    """
    -    asyncio.run(_register_faas_worker(handler))
    -
    -
    -
    -
    -
    -

    Classes

    -
    -
    -class HttpResponseContext -(headers: dict = <factory>, status: int = 200) -
    -
    -

    Represents HTTP specific response context data such as an HTTP status and headers.

    -
    - -Expand source code - -
    class HttpResponseContext(object):
    -    """Represents HTTP specific response context data such as an HTTP status and headers."""
    -
    -    headers: dict = field(default_factory=lambda: {})
    -    status: int = 200
    -
    -    def to_grpc_http_response_context(self) -> v1.HttpResponseContext:
    -        """Reformat this http response context for on the wire transfer."""
    -        return v1.HttpResponseContext(headers=self.headers, status=self.status)
    -
    -

    Class variables

    -
    -
    var headers : dict
    -
    -
    -
    -
    var status : int
    -
    -
    -
    -
    -

    Methods

    -
    -
    -def to_grpc_http_response_context(self) ‑> HttpResponseContext -
    -
    -

    Reformat this http response context for on the wire transfer.

    -
    - -Expand source code - -
    def to_grpc_http_response_context(self) -> v1.HttpResponseContext:
    -    """Reformat this http response context for on the wire transfer."""
    -    return v1.HttpResponseContext(headers=self.headers, status=self.status)
    -
    -
    -
    -
    -
    -class Response -(context: ResponseContext, data: bytes = <factory>) -
    -
    -

    Nitric Function as a Service (FaaS) response class.

    -
    - -Expand source code - -
    class Response(object):
    -    """Nitric Function as a Service (FaaS) response class."""
    -
    -    context: ResponseContext
    -    data: bytes = field(default_factory=bytes)
    -
    -    def to_grpc_trigger_response_context(self) -> TriggerResponse:
    -        """Translate a response object ready for on the wire transport."""
    -        response = TriggerResponse(data=self.data)
    -
    -        if self.context.is_http():
    -            ctx = self.context.as_http()
    -            response.http = ctx.to_grpc_http_response_context()
    -        elif self.context.is_topic():
    -            ctx = self.context.as_topic()
    -            response.topic = ctx.to_grpc_topic_response_context()
    -
    -        return response
    -
    -

    Class variables

    -
    -
    var contextResponseContext
    -
    -
    -
    -
    var data : bytes
    -
    -
    -
    -
    -

    Methods

    -
    -
    -def to_grpc_trigger_response_context(self) ‑> TriggerResponse -
    -
    -

    Translate a response object ready for on the wire transport.

    -
    - -Expand source code - -
    def to_grpc_trigger_response_context(self) -> TriggerResponse:
    -    """Translate a response object ready for on the wire transport."""
    -    response = TriggerResponse(data=self.data)
    -
    -    if self.context.is_http():
    -        ctx = self.context.as_http()
    -        response.http = ctx.to_grpc_http_response_context()
    -    elif self.context.is_topic():
    -        ctx = self.context.as_topic()
    -        response.topic = ctx.to_grpc_topic_response_context()
    -
    -    return response
    -
    -
    -
    -
    -
    -class ResponseContext -(context: Union[TopicResponseContextHttpResponseContext]) -
    -
    -

    Additional context data for a trigger response, specific to the original trigger type.

    -
    - -Expand source code - -
    class ResponseContext(object):
    -    """Additional context data for a trigger response, specific to the original trigger type."""
    -
    -    context: Union[TopicResponseContext, HttpResponseContext]
    -
    -    def is_http(self):
    -        """Indicate whether the trigger was from an HTTP request."""
    -        return isinstance(self.context, HttpResponseContext)
    -
    -    def is_topic(self):
    -        """Indicate whether the trigger was from a topic (event)."""
    -        return isinstance(self.context, TopicResponseContext)
    -
    -    def as_http(self) -> Union[HttpResponseContext, None]:
    -        """
    -        Return this context as an HTTP context type.
    -
    -        If the trigger wasn't an HTTP request, this function returns None.
    -        is_http() should be used first to determine if this was an HTTP request trigger.
    -        """
    -        if not self.is_http():
    -            return None
    -
    -        return self.context
    -
    -    def as_topic(self) -> Union[TopicResponseContext, None]:
    -        """
    -        Return this context as a topic context type.
    -
    -        If the trigger wasn't an event from a topic, this function returns None.
    -        is_topic() should be used first to determine if this was a topic trigger.
    -        """
    -        if not self.is_topic():
    -            return None
    -
    -        return self.context
    -
    -

    Class variables

    -
    -
    var context : Union[TopicResponseContextHttpResponseContext]
    -
    -
    -
    -
    -

    Methods

    -
    -
    -def as_http(self) ‑> Union[HttpResponseContext, NoneType] -
    -
    -

    Return this context as an HTTP context type.

    -

    If the trigger wasn't an HTTP request, this function returns None. -is_http() should be used first to determine if this was an HTTP request trigger.

    -
    - -Expand source code - -
    def as_http(self) -> Union[HttpResponseContext, None]:
    -    """
    -    Return this context as an HTTP context type.
    -
    -    If the trigger wasn't an HTTP request, this function returns None.
    -    is_http() should be used first to determine if this was an HTTP request trigger.
    -    """
    -    if not self.is_http():
    -        return None
    -
    -    return self.context
    -
    -
    -
    -def as_topic(self) ‑> Union[TopicResponseContext, NoneType] -
    -
    -

    Return this context as a topic context type.

    -

    If the trigger wasn't an event from a topic, this function returns None. -is_topic() should be used first to determine if this was a topic trigger.

    -
    - -Expand source code - -
    def as_topic(self) -> Union[TopicResponseContext, None]:
    -    """
    -    Return this context as a topic context type.
    -
    -    If the trigger wasn't an event from a topic, this function returns None.
    -    is_topic() should be used first to determine if this was a topic trigger.
    -    """
    -    if not self.is_topic():
    -        return None
    -
    -    return self.context
    -
    -
    -
    -def is_http(self) -
    -
    -

    Indicate whether the trigger was from an HTTP request.

    -
    - -Expand source code - -
    def is_http(self):
    -    """Indicate whether the trigger was from an HTTP request."""
    -    return isinstance(self.context, HttpResponseContext)
    -
    -
    -
    -def is_topic(self) -
    -
    -

    Indicate whether the trigger was from a topic (event).

    -
    - -Expand source code - -
    def is_topic(self):
    -    """Indicate whether the trigger was from a topic (event)."""
    -    return isinstance(self.context, TopicResponseContext)
    -
    -
    -
    -
    -
    -class TopicResponseContext -(success: bool = True) -
    -
    -

    Represents a topic/event specific response context data such as whether the event was processed successfully.

    -
    - -Expand source code - -
    class TopicResponseContext(object):
    -    """Represents a topic/event specific response context data such as whether the event was processed successfully."""
    -
    -    success: bool = True
    -
    -    def to_grpc_topic_response_context(self) -> v1.TopicResponseContext:
    -        """Reformat this topic response context for on the wire transfer."""
    -        return v1.TopicResponseContext(success=self.success)
    -
    -

    Class variables

    -
    -
    var success : bool
    -
    -
    -
    -
    -

    Methods

    -
    -
    -def to_grpc_topic_response_context(self) ‑> TopicResponseContext -
    -
    -

    Reformat this topic response context for on the wire transfer.

    -
    - -Expand source code - -
    def to_grpc_topic_response_context(self) -> v1.TopicResponseContext:
    -    """Reformat this topic response context for on the wire transfer."""
    -    return v1.TopicResponseContext(success=self.success)
    -
    -
    -
    -
    -
    -class Trigger -(context: TriggerContext, data: bytes = <factory>) -
    -
    -

    Represents a standard Nitric function request.

    -

    These requests are normalized from their original stack-specific structures.

    -
    - -Expand source code - -
    class Trigger(object):
    -    """
    -    Represents a standard Nitric function request.
    -
    -    These requests are normalized from their original stack-specific structures.
    -    """
    -
    -    context: TriggerContext
    -    data: bytes = field(default_factory=bytes)
    -
    -    def get_body(self) -> bytes:
    -        """Return the bytes of the body of the request."""
    -        return self.data
    -
    -    def get_object(self) -> dict:
    -        """
    -        Assume the payload is JSON and return the content deserialized into a dictionary.
    -
    -        :raises JSONDecodeError: raised when the request payload (body) is not valid JSON.
    -
    -        :return: the deserialized JSON request body as a dictionary
    -        """
    -        import json
    -
    -        return json.loads(self.data)
    -
    -    def default_response(self) -> Response:
    -        """
    -        Return the trigger response, based on the trigger context type.
    -
    -        The returned response can be interrogated with its context to determine the appropriate
    -        response context e.g. response.context.is_http() or response.context.is_topic().
    -        """
    -        response_ctx = None
    -
    -        if self.context.is_http():
    -            response_ctx = ResponseContext(context=HttpResponseContext())
    -        elif self.context.is_topic():
    -            response_ctx = ResponseContext(context=TopicResponseContext())
    -
    -        return Response(context=response_ctx)
    -
    -    @staticmethod
    -    def from_trigger_request(trigger_request: TriggerRequest):
    -        """Return the python SDK implementation of a Trigger from a protobuf representation."""
    -        context = TriggerContext.from_trigger_request(trigger_request)
    -
    -        return Trigger(context=context, data=trigger_request.data)
    -
    -

    Class variables

    -
    -
    var contextTriggerContext
    -
    -
    -
    -
    var data : bytes
    -
    -
    -
    -
    -

    Static methods

    -
    -
    -def from_trigger_request(trigger_request: TriggerRequest) -
    -
    -

    Return the python SDK implementation of a Trigger from a protobuf representation.

    -
    - -Expand source code - -
    @staticmethod
    -def from_trigger_request(trigger_request: TriggerRequest):
    -    """Return the python SDK implementation of a Trigger from a protobuf representation."""
    -    context = TriggerContext.from_trigger_request(trigger_request)
    -
    -    return Trigger(context=context, data=trigger_request.data)
    -
    -
    -
    -

    Methods

    -
    -
    -def default_response(self) ‑> Response -
    -
    -

    Return the trigger response, based on the trigger context type.

    -

    The returned response can be interrogated with its context to determine the appropriate -response context e.g. response.context.is_http() or response.context.is_topic().

    -
    - -Expand source code - -
    def default_response(self) -> Response:
    -    """
    -    Return the trigger response, based on the trigger context type.
    -
    -    The returned response can be interrogated with its context to determine the appropriate
    -    response context e.g. response.context.is_http() or response.context.is_topic().
    -    """
    -    response_ctx = None
    -
    -    if self.context.is_http():
    -        response_ctx = ResponseContext(context=HttpResponseContext())
    -    elif self.context.is_topic():
    -        response_ctx = ResponseContext(context=TopicResponseContext())
    -
    -    return Response(context=response_ctx)
    -
    -
    -
    -def get_body(self) ‑> bytes -
    -
    -

    Return the bytes of the body of the request.

    -
    - -Expand source code - -
    def get_body(self) -> bytes:
    -    """Return the bytes of the body of the request."""
    -    return self.data
    -
    -
    -
    -def get_object(self) ‑> dict -
    -
    -

    Assume the payload is JSON and return the content deserialized into a dictionary.

    -

    :raises JSONDecodeError: raised when the request payload (body) is not valid JSON.

    -

    :return: the deserialized JSON request body as a dictionary

    -
    - -Expand source code - -
    def get_object(self) -> dict:
    -    """
    -    Assume the payload is JSON and return the content deserialized into a dictionary.
    -
    -    :raises JSONDecodeError: raised when the request payload (body) is not valid JSON.
    -
    -    :return: the deserialized JSON request body as a dictionary
    -    """
    -    import json
    -
    -    return json.loads(self.data)
    -
    -
    -
    -
    -
    -class TriggerContext -(context: Union[TopicTriggerContextHttpTriggerContext]) -
    -
    -

    Represents the contextual metadata for a Nitric function request.

    -
    - -Expand source code - -
    class TriggerContext(object):
    -    """Represents the contextual metadata for a Nitric function request."""
    -
    -    context: typing.Union[TopicTriggerContext, HttpTriggerContext]
    -
    -    def is_http(self) -> bool:
    -        """
    -        Indicate whether the trigger was from an HTTP request.
    -
    -        This indicates the availability of additional HTTP specific context such as path, query parameters and headers.
    -        """
    -        return isinstance(self.context, HttpTriggerContext)
    -
    -    def as_http(self) -> typing.Union[HttpTriggerContext, None]:
    -        """
    -        Return this context as an HTTP context type.
    -
    -        If the trigger wasn't an HTTP request, this function returns None.
    -        is_http() should be used first to determine if this was an HTTP request trigger.
    -        """
    -        if not self.is_http():
    -            return None
    -
    -        return self.context
    -
    -    def is_topic(self) -> bool:
    -        """
    -        Indicate whether the trigger was from a topic (event).
    -
    -        This indicates the availability of additional topic/event specific context such as the topic name.
    -        """
    -        return isinstance(self.context, TriggerContext)
    -
    -    def as_topic(self) -> typing.Union[TopicTriggerContext, None]:
    -        """
    -        Return this context as a topic context type.
    -
    -        If the trigger wasn't an event from a topic, this function returns None.
    -        is_topic() should be used first to determine if this was a topic trigger.
    -        """
    -        if not self.is_topic():
    -            return None
    -
    -        return self.context
    -
    -    @staticmethod
    -    def from_trigger_request(trigger_request: TriggerRequest):
    -        """Return a TriggerContext from a TriggerRequest."""
    -        if trigger_request.http is not None:
    -            return TriggerContext(
    -                context=HttpTriggerContext(
    -<<<<<<< refs/remotes/origin/main
    -                    headers=dict(trigger_request.http.headers),
    -                    path=trigger_request.http.path,
    -                    method=trigger_request.http.method,
    -                    query_params=dict(trigger_request.http.query_params),
    -=======
    -                    headers=trigger_request.http.headers,
    -                    method=trigger_request.http.method,
    -                    query_params=trigger_request.http.query_params,
    -                    path=trigger_request.http.path,
    ->>>>>>> feat: port faas.start to bi-di streaming with membrane
    -                )
    -            )
    -        elif trigger_request.topic is not None:
    -            return TriggerContext(context=TopicTriggerContext(topic=trigger_request.topic.topic))
    -        else:
    -            # We have an error
    -            # should probably raise an exception
    -            return None
    -
    -

    Class variables

    -
    -
    var context : Union[TopicTriggerContextHttpTriggerContext]
    -
    -
    -
    -
    -

    Static methods

    -
    -
    -def from_trigger_request(trigger_request: TriggerRequest) -
    -
    -

    Return a TriggerContext from a TriggerRequest.

    -
    - -Expand source code - -
    @staticmethod
    -def from_trigger_request(trigger_request: TriggerRequest):
    -    """Return a TriggerContext from a TriggerRequest."""
    -    if trigger_request.http is not None:
    -        return TriggerContext(
    -            context=HttpTriggerContext(
    -<<<<<<< refs/remotes/origin/main
    -                headers=dict(trigger_request.http.headers),
    -                path=trigger_request.http.path,
    -                method=trigger_request.http.method,
    -                query_params=dict(trigger_request.http.query_params),
    -=======
    -                headers=trigger_request.http.headers,
    -                method=trigger_request.http.method,
    -                query_params=trigger_request.http.query_params,
    -                path=trigger_request.http.path,
    ->>>>>>> feat: port faas.start to bi-di streaming with membrane
    -            )
    -        )
    -    elif trigger_request.topic is not None:
    -        return TriggerContext(context=TopicTriggerContext(topic=trigger_request.topic.topic))
    -    else:
    -        # We have an error
    -        # should probably raise an exception
    -        return None
    -
    -
    -
    -

    Methods

    -
    -
    -def as_http(self) ‑> Union[HttpTriggerContext, NoneType] -
    -
    -

    Return this context as an HTTP context type.

    -

    If the trigger wasn't an HTTP request, this function returns None. -is_http() should be used first to determine if this was an HTTP request trigger.

    -
    - -Expand source code - -
    def as_http(self) -> typing.Union[HttpTriggerContext, None]:
    -    """
    -    Return this context as an HTTP context type.
    -
    -    If the trigger wasn't an HTTP request, this function returns None.
    -    is_http() should be used first to determine if this was an HTTP request trigger.
    -    """
    -    if not self.is_http():
    -        return None
    -
    -    return self.context
    -
    -
    -
    -def as_topic(self) ‑> Union[TopicTriggerContext, NoneType] -
    -
    -

    Return this context as a topic context type.

    -

    If the trigger wasn't an event from a topic, this function returns None. -is_topic() should be used first to determine if this was a topic trigger.

    -
    - -Expand source code - -
    def as_topic(self) -> typing.Union[TopicTriggerContext, None]:
    -    """
    -    Return this context as a topic context type.
    -
    -    If the trigger wasn't an event from a topic, this function returns None.
    -    is_topic() should be used first to determine if this was a topic trigger.
    -    """
    -    if not self.is_topic():
    -        return None
    -
    -    return self.context
    -
    -
    -
    -def is_http(self) ‑> bool -
    -
    -

    Indicate whether the trigger was from an HTTP request.

    -

    This indicates the availability of additional HTTP specific context such as path, query parameters and headers.

    -
    - -Expand source code - -
    def is_http(self) -> bool:
    -    """
    -    Indicate whether the trigger was from an HTTP request.
    -
    -    This indicates the availability of additional HTTP specific context such as path, query parameters and headers.
    -    """
    -    return isinstance(self.context, HttpTriggerContext)
    -
    -
    -
    -def is_topic(self) ‑> bool -
    -
    -

    Indicate whether the trigger was from a topic (event).

    -

    This indicates the availability of additional topic/event specific context such as the topic name.

    -
    - -Expand source code - -
    def is_topic(self) -> bool:
    -    """
    -    Indicate whether the trigger was from a topic (event).
    -
    -    This indicates the availability of additional topic/event specific context such as the topic name.
    -    """
-    return isinstance(self.context, TopicTriggerContext)
    -
    -
    -
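As a rough usage sketch (not part of the generated docs; the handler and variable names are hypothetical), a function can branch on the trigger context type before touching type-specific fields:

def handler(trigger: Trigger) -> Response:
    # Hypothetical handler: check is_http()/is_topic() before calling as_http()/as_topic().
    if trigger.context.is_http():
        http_ctx = trigger.context.as_http()
        print(http_ctx.method, http_ctx.path, http_ctx.headers)
    elif trigger.context.is_topic():
        print(trigger.context.as_topic().topic)
    # default_response() picks the response context type matching the trigger.
    return trigger.default_response()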
    -
    -
    -
    -
    - -
    - - - \ No newline at end of file diff --git a/docs/nitric/faas/response.html b/docs/nitric/faas/response.html deleted file mode 100644 index a39fad2..0000000 --- a/docs/nitric/faas/response.html +++ /dev/null @@ -1,545 +0,0 @@ - - - - - - -nitric.faas.response API documentation - - - - - - - - - - - -
    -
    -
    -

    Module nitric.faas.response

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -from dataclasses import dataclass, field
    -from typing import Union
    -from nitric.proto.nitric.faas import v1
    -from nitric.proto.nitric.faas.v1 import TriggerResponse
    -
    -
    -@dataclass(order=True)
    -class TopicResponseContext(object):
    -    """Represents a topic/event specific response context data such as whether the event was processed successfully."""
    -
    -    success: bool = True
    -
    -    def to_grpc_topic_response_context(self) -> v1.TopicResponseContext:
    -        """Reformat this topic response context for on the wire transfer."""
    -        return v1.TopicResponseContext(success=self.success)
    -
    -
    -@dataclass(order=True)
    -class HttpResponseContext(object):
    -    """Represents HTTP specific response context data such as an HTTP status and headers."""
    -
    -    headers: dict = field(default_factory=lambda: {})
    -    status: int = 200
    -
    -    def to_grpc_http_response_context(self) -> v1.HttpResponseContext:
    -        """Reformat this http response context for on the wire transfer."""
    -        return v1.HttpResponseContext(headers=self.headers, status=self.status)
    -
    -
    -@dataclass(order=True)
    -class ResponseContext(object):
    -    """Additional context data for a trigger response, specific to the original trigger type."""
    -
    -    context: Union[TopicResponseContext, HttpResponseContext]
    -
    -    def is_http(self):
    -        """Indicate whether the trigger was from an HTTP request."""
    -        return isinstance(self.context, HttpResponseContext)
    -
    -    def is_topic(self):
    -        """Indicate whether the trigger was from a topic (event)."""
    -        return isinstance(self.context, TopicResponseContext)
    -
    -    def as_http(self) -> Union[HttpResponseContext, None]:
    -        """
    -        Return this context as an HTTP context type.
    -
    -        If the trigger wasn't an HTTP request, this function returns None.
    -        is_http() should be used first to determine if this was an HTTP request trigger.
    -        """
    -        if not self.is_http():
    -            return None
    -
    -        return self.context
    -
    -    def as_topic(self) -> Union[TopicResponseContext, None]:
    -        """
    -        Return this context as a topic context type.
    -
    -        If the trigger wasn't an event from a topic, this function returns None.
    -        is_topic() should be used first to determine if this was a topic trigger.
    -        """
    -        if not self.is_topic():
    -            return None
    -
    -        return self.context
    -
    -
    -@dataclass(order=True)
    -class Response(object):
    -    """Nitric Function as a Service (FaaS) response class."""
    -
    -    context: ResponseContext
    -    data: bytes = field(default_factory=bytes)
    -
    -    def to_grpc_trigger_response_context(self) -> TriggerResponse:
    -        """Translate a response object ready for on the wire transport."""
    -        response = TriggerResponse(data=self.data)
    -
    -        if self.context.is_http():
    -            ctx = self.context.as_http()
    -            response.http = ctx.to_grpc_http_response_context()
    -        elif self.context.is_topic():
    -            ctx = self.context.as_topic()
    -            response.topic = ctx.to_grpc_topic_response_context()
    -
    -        return response
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -

    Classes

    -
    -
    -class HttpResponseContext -(headers: dict = <factory>, status: int = 200) -
    -
    -

    Represents HTTP specific response context data such as an HTTP status and headers.

    -
    - -Expand source code - -
    class HttpResponseContext(object):
    -    """Represents HTTP specific response context data such as an HTTP status and headers."""
    -
    -    headers: dict = field(default_factory=lambda: {})
    -    status: int = 200
    -
    -    def to_grpc_http_response_context(self) -> v1.HttpResponseContext:
    -        """Reformat this http response context for on the wire transfer."""
    -        return v1.HttpResponseContext(headers=self.headers, status=self.status)
    -
    -

    Class variables

    -
    -
    var headers : dict
    -
    -
    -
    -
    var status : int
    -
    -
    -
    -
    -

    Methods

    -
    -
    -def to_grpc_http_response_context(self) ‑> HttpResponseContext -
    -
    -

    Reformat this http response context for on the wire transfer.

    -
    - -Expand source code - -
    def to_grpc_http_response_context(self) -> v1.HttpResponseContext:
    -    """Reformat this http response context for on the wire transfer."""
    -    return v1.HttpResponseContext(headers=self.headers, status=self.status)
    -
    -
    -
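As a brief illustration (the values are made up), the status and headers default to 200 and an empty dict, and can be overridden when constructing the context:

ok_ctx = HttpResponseContext()                                            # status=200, headers={}
not_found_ctx = HttpResponseContext(status=404, headers={"x-error": "not-found"})
wire_ctx = not_found_ctx.to_grpc_http_response_context()                 # v1.HttpResponseContext for transport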
    -
    -
    -class Response -(context: ResponseContext, data: bytes = <factory>) -
    -
    -

    Nitric Function as a Service (FaaS) response class.

    -
    - -Expand source code - -
    class Response(object):
    -    """Nitric Function as a Service (FaaS) response class."""
    -
    -    context: ResponseContext
    -    data: bytes = field(default_factory=bytes)
    -
    -    def to_grpc_trigger_response_context(self) -> TriggerResponse:
    -        """Translate a response object ready for on the wire transport."""
    -        response = TriggerResponse(data=self.data)
    -
    -        if self.context.is_http():
    -            ctx = self.context.as_http()
    -            response.http = ctx.to_grpc_http_response_context()
    -        elif self.context.is_topic():
    -            ctx = self.context.as_topic()
    -            response.topic = ctx.to_grpc_topic_response_context()
    -
    -        return response
    -
    -

    Class variables

    -
    -
var context : ResponseContext
    -
    -
    -
    -
    var data : bytes
    -
    -
    -
    -
    -

    Methods

    -
    -
    -def to_grpc_trigger_response_context(self) ‑> TriggerResponse -
    -
    -

    Translate a response object ready for on the wire transport.

    -
    - -Expand source code - -
    def to_grpc_trigger_response_context(self) -> TriggerResponse:
    -    """Translate a response object ready for on the wire transport."""
    -    response = TriggerResponse(data=self.data)
    -
    -    if self.context.is_http():
    -        ctx = self.context.as_http()
    -        response.http = ctx.to_grpc_http_response_context()
    -    elif self.context.is_topic():
    -        ctx = self.context.as_topic()
    -        response.topic = ctx.to_grpc_topic_response_context()
    -
    -    return response
    -
    -
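A minimal sketch of assembling a Response by hand (assuming an HTTP-triggered function; the body content is illustrative):

response = Response(
    context=ResponseContext(context=HttpResponseContext(status=201)),
    data=b'{"created": true}',
)
trigger_response = response.to_grpc_trigger_response_context()   # ready to send back over gRPC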
    -
    -
    -
-class ResponseContext -(context: Union[TopicResponseContext, HttpResponseContext]) -
    -
    -

    Additional context data for a trigger response, specific to the original trigger type.

    -
    - -Expand source code - -
    class ResponseContext(object):
    -    """Additional context data for a trigger response, specific to the original trigger type."""
    -
    -    context: Union[TopicResponseContext, HttpResponseContext]
    -
    -    def is_http(self):
    -        """Indicate whether the trigger was from an HTTP request."""
    -        return isinstance(self.context, HttpResponseContext)
    -
    -    def is_topic(self):
    -        """Indicate whether the trigger was from a topic (event)."""
    -        return isinstance(self.context, TopicResponseContext)
    -
    -    def as_http(self) -> Union[HttpResponseContext, None]:
    -        """
    -        Return this context as an HTTP context type.
    -
    -        If the trigger wasn't an HTTP request, this function returns None.
    -        is_http() should be used first to determine if this was an HTTP request trigger.
    -        """
    -        if not self.is_http():
    -            return None
    -
    -        return self.context
    -
    -    def as_topic(self) -> Union[TopicResponseContext, None]:
    -        """
    -        Return this context as a topic context type.
    -
    -        If the trigger wasn't an event from a topic, this function returns None.
    -        is_topic() should be used first to determine if this was a topic trigger.
    -        """
    -        if not self.is_topic():
    -            return None
    -
    -        return self.context
    -
    -

    Class variables

    -
    -
var context : Union[TopicResponseContext, HttpResponseContext]
    -
    -
    -
    -
    -

    Methods

    -
    -
    -def as_http(self) ‑> Union[HttpResponseContext, NoneType] -
    -
    -

    Return this context as an HTTP context type.

    -

    If the trigger wasn't an HTTP request, this function returns None. -is_http() should be used first to determine if this was an HTTP request trigger.

    -
    - -Expand source code - -
    def as_http(self) -> Union[HttpResponseContext, None]:
    -    """
    -    Return this context as an HTTP context type.
    -
    -    If the trigger wasn't an HTTP request, this function returns None.
    -    is_http() should be used first to determine if this was an HTTP request trigger.
    -    """
    -    if not self.is_http():
    -        return None
    -
    -    return self.context
    -
    -
    -
    -def as_topic(self) ‑> Union[TopicResponseContext, NoneType] -
    -
    -

    Return this context as a topic context type.

    -

    If the trigger wasn't an event from a topic, this function returns None. -is_topic() should be used first to determine if this was a topic trigger.

    -
    - -Expand source code - -
    def as_topic(self) -> Union[TopicResponseContext, None]:
    -    """
    -    Return this context as a topic context type.
    -
    -    If the trigger wasn't an event from a topic, this function returns None.
    -    is_topic() should be used first to determine if this was a topic trigger.
    -    """
    -    if not self.is_topic():
    -        return None
    -
    -    return self.context
    -
    -
    -
    -def is_http(self) -
    -
    -

    Indicate whether the trigger was from an HTTP request.

    -
    - -Expand source code - -
    def is_http(self):
    -    """Indicate whether the trigger was from an HTTP request."""
    -    return isinstance(self.context, HttpResponseContext)
    -
    -
    -
    -def is_topic(self) -
    -
    -

    Indicate whether the trigger was from a topic (event).

    -
    - -Expand source code - -
    def is_topic(self):
    -    """Indicate whether the trigger was from a topic (event)."""
    -    return isinstance(self.context, TopicResponseContext)
    -
    -
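For example (a sketch, not from the source), the as_* accessors return None when the context is of the other type, so the is_* checks act as guards:

ctx = ResponseContext(context=TopicResponseContext())
assert ctx.is_topic() and not ctx.is_http()
assert ctx.as_http() is None          # wrong type: returns None rather than raising
assert ctx.as_topic().success is True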
    -
    -
    -
    -class TopicResponseContext -(success: bool = True) -
    -
    -

    Represents a topic/event specific response context data such as whether the event was processed successfully.

    -
    - -Expand source code - -
    class TopicResponseContext(object):
    -    """Represents a topic/event specific response context data such as whether the event was processed successfully."""
    -
    -    success: bool = True
    -
    -    def to_grpc_topic_response_context(self) -> v1.TopicResponseContext:
    -        """Reformat this topic response context for on the wire transfer."""
    -        return v1.TopicResponseContext(success=self.success)
    -

Class variables

-
-
var success : bool
    -
    -
    -

Methods

-
-
-def to_grpc_topic_response_context(self) ‑> TopicResponseContext -
-
-

Reformat this topic response context for on the wire transfer.

-
- -Expand source code - -
def to_grpc_topic_response_context(self) -> v1.TopicResponseContext:
-    """Reformat this topic response context for on the wire transfer."""
-    return v1.TopicResponseContext(success=self.success)
-
    -
    -
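As a short sketch (assuming the event could not be processed), setting success to False reports the failure when the context is converted for transport:

failed_ctx = TopicResponseContext(success=False)        # event was not handled successfully
wire_ctx = failed_ctx.to_grpc_topic_response_context()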
    -
    -
    -
    - -
    - - - \ No newline at end of file diff --git a/docs/nitric/faas/trigger.html b/docs/nitric/faas/trigger.html deleted file mode 100644 index b3d8c70..0000000 --- a/docs/nitric/faas/trigger.html +++ /dev/null @@ -1,760 +0,0 @@ - - - - - - -nitric.faas.trigger API documentation - - - - - - - - - - - -
    -
    -
    -

    Module nitric.faas.trigger

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -import typing
    -from dataclasses import dataclass, field
    -
    -import nitric.faas
    -from nitric.proto.nitric.faas.v1 import TriggerRequest
    -
    -from nitric.faas.response import Response, TopicResponseContext, HttpResponseContext, ResponseContext
    -
    -
    -@dataclass(order=True)
    -class HttpTriggerContext(object):
    -    """Represents Trigger metadata from a HTTP subscription."""
    -
    -    method: str
    -    path: str
    -    headers: typing.Dict[str, str]
    -    query_params: typing.Dict[str, str]
    -
    -
    -class TopicTriggerContext(object):
    -    """Represents Trigger metadata from a topic subscription."""
    -
    -    def __init__(self, topic: str):
    -        """Construct a new TopicTriggerContext, including the name of the source topic for this trigger."""
    -        self.topic = topic
    -
    -
    -@dataclass(order=True)
    -class TriggerContext(object):
    -    """Represents the contextual metadata for a Nitric function request."""
    -
    -    context: typing.Union[TopicTriggerContext, HttpTriggerContext]
    -
    -    def is_http(self) -> bool:
    -        """
    -        Indicate whether the trigger was from an HTTP request.
    -
    -        This indicates the availability of additional HTTP specific context such as path, query parameters and headers.
    -        """
    -        return isinstance(self.context, HttpTriggerContext)
    -
    -    def as_http(self) -> typing.Union[HttpTriggerContext, None]:
    -        """
    -        Return this context as an HTTP context type.
    -
    -        If the trigger wasn't an HTTP request, this function returns None.
    -        is_http() should be used first to determine if this was an HTTP request trigger.
    -        """
    -        if not self.is_http():
    -            return None
    -
    -        return self.context
    -
    -    def is_topic(self) -> bool:
    -        """
    -        Indicate whether the trigger was from a topic (event).
    -
    -        This indicates the availability of additional topic/event specific context such as the topic name.
    -        """
-        return isinstance(self.context, TopicTriggerContext)
    -
    -    def as_topic(self) -> typing.Union[TopicTriggerContext, None]:
    -        """
    -        Return this context as a topic context type.
    -
    -        If the trigger wasn't an event from a topic, this function returns None.
    -        is_topic() should be used first to determine if this was a topic trigger.
    -        """
    -        if not self.is_topic():
    -            return None
    -
    -        return self.context
    -
    -    @staticmethod
    -    def from_trigger_request(trigger_request: TriggerRequest):
    -        """Return a TriggerContext from a TriggerRequest."""
    -        if trigger_request.http is not None:
    -            return TriggerContext(
    -                context=HttpTriggerContext(
-                    headers=dict(trigger_request.http.headers),
-                    path=trigger_request.http.path,
-                    method=trigger_request.http.method,
-                    query_params=dict(trigger_request.http.query_params),
    -                )
    -            )
    -        elif trigger_request.topic is not None:
    -            return TriggerContext(context=TopicTriggerContext(topic=trigger_request.topic.topic))
    -        else:
    -            # We have an error
    -            # should probably raise an exception
    -            return None
    -
    -
    -def _clean_header(header_name: str):
    -    """Convert a Nitric HTTP request header name into the equivalent Context property name."""
    -    return header_name.lower().replace("x-nitric-", "").replace("-", "_")
    -
    -
    -@dataclass(order=True)
    -class Trigger(object):
    -    """
    -    Represents a standard Nitric function request.
    -
    -    These requests are normalized from their original stack-specific structures.
    -    """
    -
    -    context: TriggerContext
    -    data: bytes = field(default_factory=bytes)
    -
    -    def get_body(self) -> bytes:
    -        """Return the bytes of the body of the request."""
    -        return self.data
    -
    -    def get_object(self) -> dict:
    -        """
    -        Assume the payload is JSON and return the content deserialized into a dictionary.
    -
    -        :raises JSONDecodeError: raised when the request payload (body) is not valid JSON.
    -
    -        :return: the deserialized JSON request body as a dictionary
    -        """
    -        import json
    -
    -        return json.loads(self.data)
    -
    -    def default_response(self) -> Response:
    -        """
    -        Return the trigger response, based on the trigger context type.
    -
    -        The returned response can be interrogated with its context to determine the appropriate
    -        response context e.g. response.context.is_http() or response.context.is_topic().
    -        """
    -        response_ctx = None
    -
    -        if self.context.is_http():
    -            response_ctx = ResponseContext(context=HttpResponseContext())
    -        elif self.context.is_topic():
    -            response_ctx = ResponseContext(context=TopicResponseContext())
    -
    -        return Response(context=response_ctx)
    -
    -    @staticmethod
    -    def from_trigger_request(trigger_request: TriggerRequest):
    -        """Return the python SDK implementation of a Trigger from a protobuf representation."""
    -        context = TriggerContext.from_trigger_request(trigger_request)
    -
    -        return Trigger(context=context, data=trigger_request.data)
    -
    -
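A rough end-to-end sketch, under the assumption that trigger_request is a TriggerRequest received from the membrane stream (the surrounding plumbing is omitted):

trigger = Trigger.from_trigger_request(trigger_request)    # wire message -> SDK Trigger
response = trigger.default_response()                      # Response matching the trigger type
response.data = b"done"
trigger_response = response.to_grpc_trigger_response_context()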
    -
    -
    -
    -
    -
    -
    -
    -

    Classes

    -
    -
    -class HttpTriggerContext -(method: str, path: str, headers: Dict[str, str], query_params: Dict[str, str]) -
    -
    -

    Represents Trigger metadata from a HTTP subscription.

    -
    - -Expand source code - -
    class HttpTriggerContext(object):
    -    """Represents Trigger metadata from a HTTP subscription."""
    -
    -    method: str
    -    path: str
    -    headers: typing.Dict[str, str]
    -    query_params: typing.Dict[str, str]
    -
    -

    Class variables

    -
    -
    var headers : Dict[str, str]
    -
    -
    -
    -
    var method : str
    -
    -
    -
    -
    var path : str
    -
    -
    -
    -
    var query_params : Dict[str, str]
    -
    -
    -
    -
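For illustration only (the values are invented), an HTTP trigger context is a plain dataclass of request metadata:

http_ctx = HttpTriggerContext(
    method="POST",
    path="/orders",
    headers={"content-type": "application/json"},
    query_params={"expand": "items"},
)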
    -
    -
    -class TopicTriggerContext -(topic: str) -
    -
    -

    Represents Trigger metadata from a topic subscription.

    -

    Construct a new TopicTriggerContext, including the name of the source topic for this trigger.

    -
    - -Expand source code - -
    class TopicTriggerContext(object):
    -    """Represents Trigger metadata from a topic subscription."""
    -
    -    def __init__(self, topic: str):
    -        """Construct a new TopicTriggerContext, including the name of the source topic for this trigger."""
    -        self.topic = topic
    -
    -
    -
    -class Trigger -(context: TriggerContext, data: bytes = <factory>) -
    -
    -

    Represents a standard Nitric function request.

    -

    These requests are normalized from their original stack-specific structures.

    -
    - -Expand source code - -
    class Trigger(object):
    -    """
    -    Represents a standard Nitric function request.
    -
    -    These requests are normalized from their original stack-specific structures.
    -    """
    -
    -    context: TriggerContext
    -    data: bytes = field(default_factory=bytes)
    -
    -    def get_body(self) -> bytes:
    -        """Return the bytes of the body of the request."""
    -        return self.data
    -
    -    def get_object(self) -> dict:
    -        """
    -        Assume the payload is JSON and return the content deserialized into a dictionary.
    -
    -        :raises JSONDecodeError: raised when the request payload (body) is not valid JSON.
    -
    -        :return: the deserialized JSON request body as a dictionary
    -        """
    -        import json
    -
    -        return json.loads(self.data)
    -
    -    def default_response(self) -> Response:
    -        """
    -        Return the trigger response, based on the trigger context type.
    -
    -        The returned response can be interrogated with its context to determine the appropriate
    -        response context e.g. response.context.is_http() or response.context.is_topic().
    -        """
    -        response_ctx = None
    -
    -        if self.context.is_http():
    -            response_ctx = ResponseContext(context=HttpResponseContext())
    -        elif self.context.is_topic():
    -            response_ctx = ResponseContext(context=TopicResponseContext())
    -
    -        return Response(context=response_ctx)
    -
    -    @staticmethod
    -    def from_trigger_request(trigger_request: TriggerRequest):
    -        """Return the python SDK implementation of a Trigger from a protobuf representation."""
    -        context = TriggerContext.from_trigger_request(trigger_request)
    -
    -        return Trigger(context=context, data=trigger_request.data)
    -
    -

    Class variables

    -
    -
var context : TriggerContext
    -
    -
    -
    -
    var data : bytes
    -
    -
    -
    -
    -

    Static methods

    -
    -
    -def from_trigger_request(trigger_request: TriggerRequest) -
    -
    -

    Return the python SDK implementation of a Trigger from a protobuf representation.

    -
    - -Expand source code - -
    @staticmethod
    -def from_trigger_request(trigger_request: TriggerRequest):
    -    """Return the python SDK implementation of a Trigger from a protobuf representation."""
    -    context = TriggerContext.from_trigger_request(trigger_request)
    -
    -    return Trigger(context=context, data=trigger_request.data)
    -
    -
    -
    -

    Methods

    -
    -
    -def default_response(self) ‑> Response -
    -
    -

    Return the trigger response, based on the trigger context type.

    -

    The returned response can be interrogated with its context to determine the appropriate -response context e.g. response.context.is_http() or response.context.is_topic().

    -
    - -Expand source code - -
    def default_response(self) -> Response:
    -    """
    -    Return the trigger response, based on the trigger context type.
    -
    -    The returned response can be interrogated with its context to determine the appropriate
    -    response context e.g. response.context.is_http() or response.context.is_topic().
    -    """
    -    response_ctx = None
    -
    -    if self.context.is_http():
    -        response_ctx = ResponseContext(context=HttpResponseContext())
    -    elif self.context.is_topic():
    -        response_ctx = ResponseContext(context=TopicResponseContext())
    -
    -    return Response(context=response_ctx)
    -
    -
    -
    -def get_body(self) ‑> bytes -
    -
    -

    Return the bytes of the body of the request.

    -
    - -Expand source code - -
    def get_body(self) -> bytes:
    -    """Return the bytes of the body of the request."""
    -    return self.data
    -
    -
    -
    -def get_object(self) ‑> dict -
    -
    -

    Assume the payload is JSON and return the content deserialized into a dictionary.

    -

    :raises JSONDecodeError: raised when the request payload (body) is not valid JSON.

    -

    :return: the deserialized JSON request body as a dictionary

    -
    - -Expand source code - -
    def get_object(self) -> dict:
    -    """
    -    Assume the payload is JSON and return the content deserialized into a dictionary.
    -
    -    :raises JSONDecodeError: raised when the request payload (body) is not valid JSON.
    -
    -    :return: the deserialized JSON request body as a dictionary
    -    """
    -    import json
    -
    -    return json.loads(self.data)
    -
    -
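As a small sketch (the payload is hypothetical), get_object() simply deserialises the raw body with json.loads:

trigger = Trigger(
    context=TriggerContext(context=TopicTriggerContext(topic="orders")),
    data=b'{"id": 123}',
)
payload = trigger.get_object()   # {'id': 123}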
    -
    -
    -
-class TriggerContext -(context: Union[TopicTriggerContext, HttpTriggerContext]) -
    -
    -

    Represents the contextual metadata for a Nitric function request.

    -
    - -Expand source code - -
    class TriggerContext(object):
    -    """Represents the contextual metadata for a Nitric function request."""
    -
    -    context: typing.Union[TopicTriggerContext, HttpTriggerContext]
    -
    -    def is_http(self) -> bool:
    -        """
    -        Indicate whether the trigger was from an HTTP request.
    -
    -        This indicates the availability of additional HTTP specific context such as path, query parameters and headers.
    -        """
    -        return isinstance(self.context, HttpTriggerContext)
    -
    -    def as_http(self) -> typing.Union[HttpTriggerContext, None]:
    -        """
    -        Return this context as an HTTP context type.
    -
    -        If the trigger wasn't an HTTP request, this function returns None.
    -        is_http() should be used first to determine if this was an HTTP request trigger.
    -        """
    -        if not self.is_http():
    -            return None
    -
    -        return self.context
    -
    -    def is_topic(self) -> bool:
    -        """
    -        Indicate whether the trigger was from a topic (event).
    -
    -        This indicates the availability of additional topic/event specific context such as the topic name.
    -        """
-        return isinstance(self.context, TopicTriggerContext)
    -
    -    def as_topic(self) -> typing.Union[TopicTriggerContext, None]:
    -        """
    -        Return this context as a topic context type.
    -
    -        If the trigger wasn't an event from a topic, this function returns None.
    -        is_topic() should be used first to determine if this was a topic trigger.
    -        """
    -        if not self.is_topic():
    -            return None
    -
    -        return self.context
    -
    -    @staticmethod
    -    def from_trigger_request(trigger_request: TriggerRequest):
    -        """Return a TriggerContext from a TriggerRequest."""
    -        if trigger_request.http is not None:
    -            return TriggerContext(
    -                context=HttpTriggerContext(
-                    headers=dict(trigger_request.http.headers),
-                    path=trigger_request.http.path,
-                    method=trigger_request.http.method,
-                    query_params=dict(trigger_request.http.query_params),
    -                )
    -            )
    -        elif trigger_request.topic is not None:
    -            return TriggerContext(context=TopicTriggerContext(topic=trigger_request.topic.topic))
    -        else:
    -            # We have an error
    -            # should probably raise an exception
    -            return None
    -
    -

    Class variables

    -
    -
var context : Union[TopicTriggerContext, HttpTriggerContext]
    -
    -
    -
    -
    -

    Static methods

    -
    -
    -def from_trigger_request(trigger_request: TriggerRequest) -
    -
    -

    Return a TriggerContext from a TriggerRequest.

    -
    - -Expand source code - -
    @staticmethod
    -def from_trigger_request(trigger_request: TriggerRequest):
    -    """Return a TriggerContext from a TriggerRequest."""
    -    if trigger_request.http is not None:
    -        return TriggerContext(
    -            context=HttpTriggerContext(
-                headers=dict(trigger_request.http.headers),
-                path=trigger_request.http.path,
-                method=trigger_request.http.method,
-                query_params=dict(trigger_request.http.query_params),
    -            )
    -        )
    -    elif trigger_request.topic is not None:
    -        return TriggerContext(context=TopicTriggerContext(topic=trigger_request.topic.topic))
    -    else:
    -        # We have an error
    -        # should probably raise an exception
    -        return None
    -
    -
    -
    -

    Methods

    -
    -
    -def as_http(self) ‑> Union[HttpTriggerContext, NoneType] -
    -
    -

    Return this context as an HTTP context type.

    -

    If the trigger wasn't an HTTP request, this function returns None. -is_http() should be used first to determine if this was an HTTP request trigger.

    -
    - -Expand source code - -
    def as_http(self) -> typing.Union[HttpTriggerContext, None]:
    -    """
    -    Return this context as an HTTP context type.
    -
    -    If the trigger wasn't an HTTP request, this function returns None.
    -    is_http() should be used first to determine if this was an HTTP request trigger.
    -    """
    -    if not self.is_http():
    -        return None
    -
    -    return self.context
    -
    -
    -
    -def as_topic(self) ‑> Union[TopicTriggerContext, NoneType] -
    -
    -

    Return this context as a topic context type.

    -

    If the trigger wasn't an event from a topic, this function returns None. -is_topic() should be used first to determine if this was a topic trigger.

    -
    - -Expand source code - -
    def as_topic(self) -> typing.Union[TopicTriggerContext, None]:
    -    """
    -    Return this context as a topic context type.
    -
    -    If the trigger wasn't an event from a topic, this function returns None.
    -    is_topic() should be used first to determine if this was a topic trigger.
    -    """
    -    if not self.is_topic():
    -        return None
    -
    -    return self.context
    -
    -
    -
    -def is_http(self) ‑> bool -
    -
    -

    Indicate whether the trigger was from an HTTP request.

    -

    This indicates the availability of additional HTTP specific context such as path, query parameters and headers.

    -
    - -Expand source code - -
    def is_http(self) -> bool:
    -    """
    -    Indicate whether the trigger was from an HTTP request.
    -
    -    This indicates the availability of additional HTTP specific context such as path, query parameters and headers.
    -    """
    -    return isinstance(self.context, HttpTriggerContext)
    -
    -
    -
    -def is_topic(self) ‑> bool -
    -
    -

    Indicate whether the trigger was from a topic (event).

    -

    This indicates the availability of additional topic/event specific context such as the topic name.

    -
    - -Expand source code - -
    def is_topic(self) -> bool:
    -    """
    -    Indicate whether the trigger was from a topic (event).
    -
    -    This indicates the availability of additional topic/event specific context such as the topic name.
    -    """
-    return isinstance(self.context, TopicTriggerContext)
    -
    -
    -
    -
    -
    -
    - -
    - - - \ No newline at end of file diff --git a/docs/nitric/index.html b/docs/nitric/index.html index 4db1f09..2fbcc5a 100644 --- a/docs/nitric/index.html +++ b/docs/nitric/index.html @@ -59,11 +59,7 @@

    Sub-modules

    Nitric SDK Configuration Settings.

    -
    nitric.faas
    -
    -

    Nitric Function as a Service (FaaS) Package.

    -
    -
    nitric.proto
    +
    nitric.faas
    @@ -90,8 +86,7 @@

    Index

    diff --git a/docs/nitric/proto/event/v1/event_pb2.html b/docs/nitric/proto/event/v1/event_pb2.html deleted file mode 100644 index e636e12..0000000 --- a/docs/nitric/proto/event/v1/event_pb2.html +++ /dev/null @@ -1,632 +0,0 @@ - - - - - - -nitric.proto.event.v1.event_pb2 API documentation - - - - - - - - - - - -
    -
    -
    -

    Module nitric.proto.event.v1.event_pb2

    -
    -
    -

    Generated protocol buffer code.

    -
    - -Expand source code - -
    # -*- coding: utf-8 -*-
    -#
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -# Generated by the protocol buffer compiler.  DO NOT EDIT!
    -# source: event/v1/event.proto
    -"""Generated protocol buffer code."""
    -from google.protobuf import descriptor as _descriptor
    -from google.protobuf import message as _message
    -from google.protobuf import reflection as _reflection
    -from google.protobuf import symbol_database as _symbol_database
    -# @@protoc_insertion_point(imports)
    -
    -_sym_db = _symbol_database.Default()
    -
    -
    -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
    -
    -
    -DESCRIPTOR = _descriptor.FileDescriptor(
    -  name='event/v1/event.proto',
    -  package='nitric.event.v1',
    -  syntax='proto3',
    -  serialized_options=b'\n\030io.nitric.proto.event.v1B\006EventsP\001Z\014nitric/v1;v1\252\002\025Nitric.Proto.Event.v1\312\002\025Nitric\\Proto\\Event\\V1',
    -  create_key=_descriptor._internal_create_key,
    -  serialized_pb=b'\n\x14\x65vent/v1/event.proto\x12\x0fnitric.event.v1\x1a\x1cgoogle/protobuf/struct.proto\"Q\n\x13\x45ventPublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12+\n\x05\x65vent\x18\x02 \x01(\x0b\x32\x1c.nitric.event.v1.NitricEvent\"\"\n\x14\x45ventPublishResponse\x12\n\n\x02id\x18\x01 \x01(\t\"\x12\n\x10TopicListRequest\"A\n\x11TopicListResponse\x12,\n\x06topics\x18\x01 \x03(\x0b\x32\x1c.nitric.event.v1.NitricTopic\"\x1b\n\x0bNitricTopic\x12\x0c\n\x04name\x18\x01 \x01(\t\"Y\n\x0bNitricEvent\x12\n\n\x02id\x18\x01 \x01(\t\x12\x14\n\x0cpayload_type\x18\x02 \x01(\t\x12(\n\x07payload\x18\x03 \x01(\x0b\x32\x17.google.protobuf.Struct2_\n\x05\x45vent\x12V\n\x07Publish\x12$.nitric.event.v1.EventPublishRequest\x1a%.nitric.event.v1.EventPublishResponse2V\n\x05Topic\x12M\n\x04List\x12!.nitric.event.v1.TopicListRequest\x1a\".nitric.event.v1.TopicListResponseBb\n\x18io.nitric.proto.event.v1B\x06\x45ventsP\x01Z\x0cnitric/v1;v1\xaa\x02\x15Nitric.Proto.Event.v1\xca\x02\x15Nitric\\Proto\\Event\\V1b\x06proto3'
    -  ,
    -  dependencies=[google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,])
    -
    -
    -
    -
    -_EVENTPUBLISHREQUEST = _descriptor.Descriptor(
    -  name='EventPublishRequest',
    -  full_name='nitric.event.v1.EventPublishRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='topic', full_name='nitric.event.v1.EventPublishRequest.topic', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='event', full_name='nitric.event.v1.EventPublishRequest.event', index=1,
    -      number=2, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=71,
    -  serialized_end=152,
    -)
    -
    -
    -_EVENTPUBLISHRESPONSE = _descriptor.Descriptor(
    -  name='EventPublishResponse',
    -  full_name='nitric.event.v1.EventPublishResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='id', full_name='nitric.event.v1.EventPublishResponse.id', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=154,
    -  serialized_end=188,
    -)
    -
    -
    -_TOPICLISTREQUEST = _descriptor.Descriptor(
    -  name='TopicListRequest',
    -  full_name='nitric.event.v1.TopicListRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=190,
    -  serialized_end=208,
    -)
    -
    -
    -_TOPICLISTRESPONSE = _descriptor.Descriptor(
    -  name='TopicListResponse',
    -  full_name='nitric.event.v1.TopicListResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='topics', full_name='nitric.event.v1.TopicListResponse.topics', index=0,
    -      number=1, type=11, cpp_type=10, label=3,
    -      has_default_value=False, default_value=[],
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=210,
    -  serialized_end=275,
    -)
    -
    -
    -_NITRICTOPIC = _descriptor.Descriptor(
    -  name='NitricTopic',
    -  full_name='nitric.event.v1.NitricTopic',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='name', full_name='nitric.event.v1.NitricTopic.name', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=277,
    -  serialized_end=304,
    -)
    -
    -
    -_NITRICEVENT = _descriptor.Descriptor(
    -  name='NitricEvent',
    -  full_name='nitric.event.v1.NitricEvent',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='id', full_name='nitric.event.v1.NitricEvent.id', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='payload_type', full_name='nitric.event.v1.NitricEvent.payload_type', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='payload', full_name='nitric.event.v1.NitricEvent.payload', index=2,
    -      number=3, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=306,
    -  serialized_end=395,
    -)
    -
    -_EVENTPUBLISHREQUEST.fields_by_name['event'].message_type = _NITRICEVENT
    -_TOPICLISTRESPONSE.fields_by_name['topics'].message_type = _NITRICTOPIC
    -_NITRICEVENT.fields_by_name['payload'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
    -DESCRIPTOR.message_types_by_name['EventPublishRequest'] = _EVENTPUBLISHREQUEST
    -DESCRIPTOR.message_types_by_name['EventPublishResponse'] = _EVENTPUBLISHRESPONSE
    -DESCRIPTOR.message_types_by_name['TopicListRequest'] = _TOPICLISTREQUEST
    -DESCRIPTOR.message_types_by_name['TopicListResponse'] = _TOPICLISTRESPONSE
    -DESCRIPTOR.message_types_by_name['NitricTopic'] = _NITRICTOPIC
    -DESCRIPTOR.message_types_by_name['NitricEvent'] = _NITRICEVENT
    -_sym_db.RegisterFileDescriptor(DESCRIPTOR)
    -
    -EventPublishRequest = _reflection.GeneratedProtocolMessageType('EventPublishRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _EVENTPUBLISHREQUEST,
    -  '__module__' : 'event.v1.event_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.event.v1.EventPublishRequest)
    -  })
    -_sym_db.RegisterMessage(EventPublishRequest)
    -
    -EventPublishResponse = _reflection.GeneratedProtocolMessageType('EventPublishResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _EVENTPUBLISHRESPONSE,
    -  '__module__' : 'event.v1.event_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.event.v1.EventPublishResponse)
    -  })
    -_sym_db.RegisterMessage(EventPublishResponse)
    -
    -TopicListRequest = _reflection.GeneratedProtocolMessageType('TopicListRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _TOPICLISTREQUEST,
    -  '__module__' : 'event.v1.event_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.event.v1.TopicListRequest)
    -  })
    -_sym_db.RegisterMessage(TopicListRequest)
    -
    -TopicListResponse = _reflection.GeneratedProtocolMessageType('TopicListResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _TOPICLISTRESPONSE,
    -  '__module__' : 'event.v1.event_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.event.v1.TopicListResponse)
    -  })
    -_sym_db.RegisterMessage(TopicListResponse)
    -
    -NitricTopic = _reflection.GeneratedProtocolMessageType('NitricTopic', (_message.Message,), {
    -  'DESCRIPTOR' : _NITRICTOPIC,
    -  '__module__' : 'event.v1.event_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.event.v1.NitricTopic)
    -  })
    -_sym_db.RegisterMessage(NitricTopic)
    -
    -NitricEvent = _reflection.GeneratedProtocolMessageType('NitricEvent', (_message.Message,), {
    -  'DESCRIPTOR' : _NITRICEVENT,
    -  '__module__' : 'event.v1.event_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.event.v1.NitricEvent)
    -  })
    -_sym_db.RegisterMessage(NitricEvent)
    -
    -
    -DESCRIPTOR._options = None
    -
    -_EVENT = _descriptor.ServiceDescriptor(
    -  name='Event',
    -  full_name='nitric.event.v1.Event',
    -  file=DESCRIPTOR,
    -  index=0,
    -  serialized_options=None,
    -  create_key=_descriptor._internal_create_key,
    -  serialized_start=397,
    -  serialized_end=492,
    -  methods=[
    -  _descriptor.MethodDescriptor(
    -    name='Publish',
    -    full_name='nitric.event.v1.Event.Publish',
    -    index=0,
    -    containing_service=None,
    -    input_type=_EVENTPUBLISHREQUEST,
    -    output_type=_EVENTPUBLISHRESPONSE,
    -    serialized_options=None,
    -    create_key=_descriptor._internal_create_key,
    -  ),
    -])
    -_sym_db.RegisterServiceDescriptor(_EVENT)
    -
    -DESCRIPTOR.services_by_name['Event'] = _EVENT
    -
    -
    -_TOPIC = _descriptor.ServiceDescriptor(
    -  name='Topic',
    -  full_name='nitric.event.v1.Topic',
    -  file=DESCRIPTOR,
    -  index=1,
    -  serialized_options=None,
    -  create_key=_descriptor._internal_create_key,
    -  serialized_start=494,
    -  serialized_end=580,
    -  methods=[
    -  _descriptor.MethodDescriptor(
    -    name='List',
    -    full_name='nitric.event.v1.Topic.List',
    -    index=0,
    -    containing_service=None,
    -    input_type=_TOPICLISTREQUEST,
    -    output_type=_TOPICLISTRESPONSE,
    -    serialized_options=None,
    -    create_key=_descriptor._internal_create_key,
    -  ),
    -])
    -_sym_db.RegisterServiceDescriptor(_TOPIC)
    -
    -DESCRIPTOR.services_by_name['Topic'] = _TOPIC
    -
    -# @@protoc_insertion_point(module_scope)
    -
    -
    -
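A hedged example of using the generated messages directly (the topic name and payload values are placeholders; the higher-level SDK clients are normally preferred):

from google.protobuf import struct_pb2

payload = struct_pb2.Struct()
payload.update({"amount": 42, "status": "created"})    # Struct carries arbitrary JSON-like data

request = EventPublishRequest(
    topic="orders",                                     # placeholder topic name
    event=NitricEvent(id="evt-1", payload_type="order-created", payload=payload),
)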
    -
    -
    -
    -
    -
    -
    -

    Classes

    -
    -
    -class EventPublishRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var event
    -
    -

    Field nitric.event.v1.EventPublishRequest.event

    -
    -
    var topic
    -
    -

    Field nitric.event.v1.EventPublishRequest.topic

    -
    -
    -
    -
    -class EventPublishResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var id
    -
    -

    Field nitric.event.v1.EventPublishResponse.id

    -
    -
    -
    -
    -class NitricEvent -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var id
    -
    -

    Field nitric.event.v1.NitricEvent.id

    -
    -
    var payload
    -
    -

    Field nitric.event.v1.NitricEvent.payload

    -
    -
    var payload_type
    -
    -

    Field nitric.event.v1.NitricEvent.payload_type

    -
    -
    -
    -
    -class NitricTopic -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var name
    -
    -

    Field nitric.event.v1.NitricTopic.name

    -
    -
    -
    -
    -class TopicListRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -
    -
    -class TopicListResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var topics
    -
    -

    Field nitric.event.v1.TopicListResponse.topics

    -
    -
    -
    -
    -
    -
    - -
    - - - \ No newline at end of file diff --git a/docs/nitric/proto/faas/v1/faas_pb2.html b/docs/nitric/proto/faas/v1/faas_pb2.html deleted file mode 100644 index c32f45c..0000000 --- a/docs/nitric/proto/faas/v1/faas_pb2.html +++ /dev/null @@ -1,1212 +0,0 @@ - - - - - - -nitric.proto.faas.v1.faas_pb2 API documentation - - - - - - - - - - - -
    -
    -
    -

    Module nitric.proto.faas.v1.faas_pb2

    -
    -
    -

    Generated protocol buffer code.

    -
    - -Expand source code - -
    # -*- coding: utf-8 -*-
    -#
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -# Generated by the protocol buffer compiler.  DO NOT EDIT!
    -# source: faas/v1/faas.proto
    -"""Generated protocol buffer code."""
    -from google.protobuf import descriptor as _descriptor
    -from google.protobuf import message as _message
    -from google.protobuf import reflection as _reflection
    -from google.protobuf import symbol_database as _symbol_database
    -# @@protoc_insertion_point(imports)
    -
    -_sym_db = _symbol_database.Default()
    -
    -
    -
    -
    -DESCRIPTOR = _descriptor.FileDescriptor(
    -  name='faas/v1/faas.proto',
    -  package='nitric.faas.v1',
    -  syntax='proto3',
    -  serialized_options=b'\n\027io.nitric.proto.faas.v1B\nNitricFaasP\001Z\014nitric/v1;v1\252\002\024Nitric.Proto.Faas.v1\312\002\024Nitric\\Proto\\Faas\\V1',
    -  create_key=_descriptor._internal_create_key,
    -  serialized_pb=b'\n\x12\x66\x61\x61s/v1/faas.proto\x12\x0enitric.faas.v1\"\x98\x01\n\rClientMessage\x12\n\n\x02id\x18\x01 \x01(\t\x12\x33\n\x0cinit_request\x18\x02 \x01(\x0b\x32\x1b.nitric.faas.v1.InitRequestH\x00\x12;\n\x10trigger_response\x18\x03 \x01(\x0b\x32\x1f.nitric.faas.v1.TriggerResponseH\x00\x42\t\n\x07\x63ontent\"\x98\x01\n\rServerMessage\x12\n\n\x02id\x18\x01 \x01(\t\x12\x35\n\rinit_response\x18\x02 \x01(\x0b\x32\x1c.nitric.faas.v1.InitResponseH\x00\x12\x39\n\x0ftrigger_request\x18\x03 \x01(\x0b\x32\x1e.nitric.faas.v1.TriggerRequestH\x00\x42\t\n\x07\x63ontent\"\r\n\x0bInitRequest\"\x0e\n\x0cInitResponse\"\xa6\x01\n\x0eTriggerRequest\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x11\n\tmime_type\x18\x02 \x01(\t\x12\x32\n\x04http\x18\x03 \x01(\x0b\x32\".nitric.faas.v1.HttpTriggerContextH\x00\x12\x34\n\x05topic\x18\x04 \x01(\x0b\x32#.nitric.faas.v1.TopicTriggerContextH\x00\x42\t\n\x07\x63ontext\"\xa3\x02\n\x12HttpTriggerContext\x12\x0e\n\x06method\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12@\n\x07headers\x18\x03 \x03(\x0b\x32/.nitric.faas.v1.HttpTriggerContext.HeadersEntry\x12I\n\x0cquery_params\x18\x04 \x03(\x0b\x32\x33.nitric.faas.v1.HttpTriggerContext.QueryParamsEntry\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x32\n\x10QueryParamsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x13TopicTriggerContext\x12\r\n\x05topic\x18\x01 \x01(\t\"\x96\x01\n\x0fTriggerResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x33\n\x04http\x18\n \x01(\x0b\x32#.nitric.faas.v1.HttpResponseContextH\x00\x12\x35\n\x05topic\x18\x0b \x01(\x0b\x32$.nitric.faas.v1.TopicResponseContextH\x00\x42\t\n\x07\x63ontext\"\x98\x01\n\x13HttpResponseContext\x12\x41\n\x07headers\x18\x01 \x03(\x0b\x32\x30.nitric.faas.v1.HttpResponseContext.HeadersEntry\x12\x0e\n\x06status\x18\x02 \x01(\x05\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\'\n\x14TopicResponseContext\x12\x0f\n\x07success\x18\x01 \x01(\x08\x32Y\n\x04\x46\x61\x61s\x12Q\n\rTriggerStream\x12\x1d.nitric.faas.v1.ClientMessage\x1a\x1d.nitric.faas.v1.ServerMessage(\x01\x30\x01\x42\x63\n\x17io.nitric.proto.faas.v1B\nNitricFaasP\x01Z\x0cnitric/v1;v1\xaa\x02\x14Nitric.Proto.Faas.v1\xca\x02\x14Nitric\\Proto\\Faas\\V1b\x06proto3'
    -)
    -
    -
    -
    -
    -_CLIENTMESSAGE = _descriptor.Descriptor(
    -  name='ClientMessage',
    -  full_name='nitric.faas.v1.ClientMessage',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='id', full_name='nitric.faas.v1.ClientMessage.id', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='init_request', full_name='nitric.faas.v1.ClientMessage.init_request', index=1,
    -      number=2, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='trigger_response', full_name='nitric.faas.v1.ClientMessage.trigger_response', index=2,
    -      number=3, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -    _descriptor.OneofDescriptor(
    -      name='content', full_name='nitric.faas.v1.ClientMessage.content',
    -      index=0, containing_type=None,
    -      create_key=_descriptor._internal_create_key,
    -    fields=[]),
    -  ],
    -  serialized_start=39,
    -  serialized_end=191,
    -)
    -
    -
    -_SERVERMESSAGE = _descriptor.Descriptor(
    -  name='ServerMessage',
    -  full_name='nitric.faas.v1.ServerMessage',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='id', full_name='nitric.faas.v1.ServerMessage.id', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='init_response', full_name='nitric.faas.v1.ServerMessage.init_response', index=1,
    -      number=2, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='trigger_request', full_name='nitric.faas.v1.ServerMessage.trigger_request', index=2,
    -      number=3, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -    _descriptor.OneofDescriptor(
    -      name='content', full_name='nitric.faas.v1.ServerMessage.content',
    -      index=0, containing_type=None,
    -      create_key=_descriptor._internal_create_key,
    -    fields=[]),
    -  ],
    -  serialized_start=194,
    -  serialized_end=346,
    -)
    -
    -
    -_INITREQUEST = _descriptor.Descriptor(
    -  name='InitRequest',
    -  full_name='nitric.faas.v1.InitRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=348,
    -  serialized_end=361,
    -)
    -
    -
    -_INITRESPONSE = _descriptor.Descriptor(
    -  name='InitResponse',
    -  full_name='nitric.faas.v1.InitResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=363,
    -  serialized_end=377,
    -)
    -
    -
    -_TRIGGERREQUEST = _descriptor.Descriptor(
    -  name='TriggerRequest',
    -  full_name='nitric.faas.v1.TriggerRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='data', full_name='nitric.faas.v1.TriggerRequest.data', index=0,
    -      number=1, type=12, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"",
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='mime_type', full_name='nitric.faas.v1.TriggerRequest.mime_type', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='http', full_name='nitric.faas.v1.TriggerRequest.http', index=2,
    -      number=3, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='topic', full_name='nitric.faas.v1.TriggerRequest.topic', index=3,
    -      number=4, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -    _descriptor.OneofDescriptor(
    -      name='context', full_name='nitric.faas.v1.TriggerRequest.context',
    -      index=0, containing_type=None,
    -      create_key=_descriptor._internal_create_key,
    -    fields=[]),
    -  ],
    -  serialized_start=380,
    -  serialized_end=546,
    -)
    -
    -
    -_HTTPTRIGGERCONTEXT_HEADERSENTRY = _descriptor.Descriptor(
    -  name='HeadersEntry',
    -  full_name='nitric.faas.v1.HttpTriggerContext.HeadersEntry',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='key', full_name='nitric.faas.v1.HttpTriggerContext.HeadersEntry.key', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='value', full_name='nitric.faas.v1.HttpTriggerContext.HeadersEntry.value', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=b'8\001',
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=742,
    -  serialized_end=788,
    -)
    -
    -_HTTPTRIGGERCONTEXT_QUERYPARAMSENTRY = _descriptor.Descriptor(
    -  name='QueryParamsEntry',
    -  full_name='nitric.faas.v1.HttpTriggerContext.QueryParamsEntry',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='key', full_name='nitric.faas.v1.HttpTriggerContext.QueryParamsEntry.key', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='value', full_name='nitric.faas.v1.HttpTriggerContext.QueryParamsEntry.value', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=b'8\001',
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=790,
    -  serialized_end=840,
    -)
    -
    -_HTTPTRIGGERCONTEXT = _descriptor.Descriptor(
    -  name='HttpTriggerContext',
    -  full_name='nitric.faas.v1.HttpTriggerContext',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='method', full_name='nitric.faas.v1.HttpTriggerContext.method', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='path', full_name='nitric.faas.v1.HttpTriggerContext.path', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='headers', full_name='nitric.faas.v1.HttpTriggerContext.headers', index=2,
    -      number=3, type=11, cpp_type=10, label=3,
    -      has_default_value=False, default_value=[],
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='query_params', full_name='nitric.faas.v1.HttpTriggerContext.query_params', index=3,
    -      number=4, type=11, cpp_type=10, label=3,
    -      has_default_value=False, default_value=[],
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[_HTTPTRIGGERCONTEXT_HEADERSENTRY, _HTTPTRIGGERCONTEXT_QUERYPARAMSENTRY, ],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=549,
    -  serialized_end=840,
    -)
    -
    -
    -_TOPICTRIGGERCONTEXT = _descriptor.Descriptor(
    -  name='TopicTriggerContext',
    -  full_name='nitric.faas.v1.TopicTriggerContext',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='topic', full_name='nitric.faas.v1.TopicTriggerContext.topic', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=842,
    -  serialized_end=878,
    -)
    -
    -
    -_TRIGGERRESPONSE = _descriptor.Descriptor(
    -  name='TriggerResponse',
    -  full_name='nitric.faas.v1.TriggerResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='data', full_name='nitric.faas.v1.TriggerResponse.data', index=0,
    -      number=1, type=12, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"",
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='http', full_name='nitric.faas.v1.TriggerResponse.http', index=1,
    -      number=10, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='topic', full_name='nitric.faas.v1.TriggerResponse.topic', index=2,
    -      number=11, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -    _descriptor.OneofDescriptor(
    -      name='context', full_name='nitric.faas.v1.TriggerResponse.context',
    -      index=0, containing_type=None,
    -      create_key=_descriptor._internal_create_key,
    -    fields=[]),
    -  ],
    -  serialized_start=881,
    -  serialized_end=1031,
    -)
    -
    -
    -_HTTPRESPONSECONTEXT_HEADERSENTRY = _descriptor.Descriptor(
    -  name='HeadersEntry',
    -  full_name='nitric.faas.v1.HttpResponseContext.HeadersEntry',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='key', full_name='nitric.faas.v1.HttpResponseContext.HeadersEntry.key', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='value', full_name='nitric.faas.v1.HttpResponseContext.HeadersEntry.value', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=b'8\001',
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=742,
    -  serialized_end=788,
    -)
    -
    -_HTTPRESPONSECONTEXT = _descriptor.Descriptor(
    -  name='HttpResponseContext',
    -  full_name='nitric.faas.v1.HttpResponseContext',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='headers', full_name='nitric.faas.v1.HttpResponseContext.headers', index=0,
    -      number=1, type=11, cpp_type=10, label=3,
    -      has_default_value=False, default_value=[],
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='status', full_name='nitric.faas.v1.HttpResponseContext.status', index=1,
    -      number=2, type=5, cpp_type=1, label=1,
    -      has_default_value=False, default_value=0,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[_HTTPRESPONSECONTEXT_HEADERSENTRY, ],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=1034,
    -  serialized_end=1186,
    -)
    -
    -
    -_TOPICRESPONSECONTEXT = _descriptor.Descriptor(
    -  name='TopicResponseContext',
    -  full_name='nitric.faas.v1.TopicResponseContext',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='success', full_name='nitric.faas.v1.TopicResponseContext.success', index=0,
    -      number=1, type=8, cpp_type=7, label=1,
    -      has_default_value=False, default_value=False,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=1188,
    -  serialized_end=1227,
    -)
    -
    -_CLIENTMESSAGE.fields_by_name['init_request'].message_type = _INITREQUEST
    -_CLIENTMESSAGE.fields_by_name['trigger_response'].message_type = _TRIGGERRESPONSE
    -_CLIENTMESSAGE.oneofs_by_name['content'].fields.append(
    -  _CLIENTMESSAGE.fields_by_name['init_request'])
    -_CLIENTMESSAGE.fields_by_name['init_request'].containing_oneof = _CLIENTMESSAGE.oneofs_by_name['content']
    -_CLIENTMESSAGE.oneofs_by_name['content'].fields.append(
    -  _CLIENTMESSAGE.fields_by_name['trigger_response'])
    -_CLIENTMESSAGE.fields_by_name['trigger_response'].containing_oneof = _CLIENTMESSAGE.oneofs_by_name['content']
    -_SERVERMESSAGE.fields_by_name['init_response'].message_type = _INITRESPONSE
    -_SERVERMESSAGE.fields_by_name['trigger_request'].message_type = _TRIGGERREQUEST
    -_SERVERMESSAGE.oneofs_by_name['content'].fields.append(
    -  _SERVERMESSAGE.fields_by_name['init_response'])
    -_SERVERMESSAGE.fields_by_name['init_response'].containing_oneof = _SERVERMESSAGE.oneofs_by_name['content']
    -_SERVERMESSAGE.oneofs_by_name['content'].fields.append(
    -  _SERVERMESSAGE.fields_by_name['trigger_request'])
    -_SERVERMESSAGE.fields_by_name['trigger_request'].containing_oneof = _SERVERMESSAGE.oneofs_by_name['content']
    -_TRIGGERREQUEST.fields_by_name['http'].message_type = _HTTPTRIGGERCONTEXT
    -_TRIGGERREQUEST.fields_by_name['topic'].message_type = _TOPICTRIGGERCONTEXT
    -_TRIGGERREQUEST.oneofs_by_name['context'].fields.append(
    -  _TRIGGERREQUEST.fields_by_name['http'])
    -_TRIGGERREQUEST.fields_by_name['http'].containing_oneof = _TRIGGERREQUEST.oneofs_by_name['context']
    -_TRIGGERREQUEST.oneofs_by_name['context'].fields.append(
    -  _TRIGGERREQUEST.fields_by_name['topic'])
    -_TRIGGERREQUEST.fields_by_name['topic'].containing_oneof = _TRIGGERREQUEST.oneofs_by_name['context']
    -_HTTPTRIGGERCONTEXT_HEADERSENTRY.containing_type = _HTTPTRIGGERCONTEXT
    -_HTTPTRIGGERCONTEXT_QUERYPARAMSENTRY.containing_type = _HTTPTRIGGERCONTEXT
    -_HTTPTRIGGERCONTEXT.fields_by_name['headers'].message_type = _HTTPTRIGGERCONTEXT_HEADERSENTRY
    -_HTTPTRIGGERCONTEXT.fields_by_name['query_params'].message_type = _HTTPTRIGGERCONTEXT_QUERYPARAMSENTRY
    -_TRIGGERRESPONSE.fields_by_name['http'].message_type = _HTTPRESPONSECONTEXT
    -_TRIGGERRESPONSE.fields_by_name['topic'].message_type = _TOPICRESPONSECONTEXT
    -_TRIGGERRESPONSE.oneofs_by_name['context'].fields.append(
    -  _TRIGGERRESPONSE.fields_by_name['http'])
    -_TRIGGERRESPONSE.fields_by_name['http'].containing_oneof = _TRIGGERRESPONSE.oneofs_by_name['context']
    -_TRIGGERRESPONSE.oneofs_by_name['context'].fields.append(
    -  _TRIGGERRESPONSE.fields_by_name['topic'])
    -_TRIGGERRESPONSE.fields_by_name['topic'].containing_oneof = _TRIGGERRESPONSE.oneofs_by_name['context']
    -_HTTPRESPONSECONTEXT_HEADERSENTRY.containing_type = _HTTPRESPONSECONTEXT
    -_HTTPRESPONSECONTEXT.fields_by_name['headers'].message_type = _HTTPRESPONSECONTEXT_HEADERSENTRY
    -DESCRIPTOR.message_types_by_name['ClientMessage'] = _CLIENTMESSAGE
    -DESCRIPTOR.message_types_by_name['ServerMessage'] = _SERVERMESSAGE
    -DESCRIPTOR.message_types_by_name['InitRequest'] = _INITREQUEST
    -DESCRIPTOR.message_types_by_name['InitResponse'] = _INITRESPONSE
    -DESCRIPTOR.message_types_by_name['TriggerRequest'] = _TRIGGERREQUEST
    -DESCRIPTOR.message_types_by_name['HttpTriggerContext'] = _HTTPTRIGGERCONTEXT
    -DESCRIPTOR.message_types_by_name['TopicTriggerContext'] = _TOPICTRIGGERCONTEXT
    -DESCRIPTOR.message_types_by_name['TriggerResponse'] = _TRIGGERRESPONSE
    -DESCRIPTOR.message_types_by_name['HttpResponseContext'] = _HTTPRESPONSECONTEXT
    -DESCRIPTOR.message_types_by_name['TopicResponseContext'] = _TOPICRESPONSECONTEXT
    -_sym_db.RegisterFileDescriptor(DESCRIPTOR)
    -
    -ClientMessage = _reflection.GeneratedProtocolMessageType('ClientMessage', (_message.Message,), {
    -  'DESCRIPTOR' : _CLIENTMESSAGE,
    -  '__module__' : 'faas.v1.faas_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.faas.v1.ClientMessage)
    -  })
    -_sym_db.RegisterMessage(ClientMessage)
    -
    -ServerMessage = _reflection.GeneratedProtocolMessageType('ServerMessage', (_message.Message,), {
    -  'DESCRIPTOR' : _SERVERMESSAGE,
    -  '__module__' : 'faas.v1.faas_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.faas.v1.ServerMessage)
    -  })
    -_sym_db.RegisterMessage(ServerMessage)
    -
    -InitRequest = _reflection.GeneratedProtocolMessageType('InitRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _INITREQUEST,
    -  '__module__' : 'faas.v1.faas_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.faas.v1.InitRequest)
    -  })
    -_sym_db.RegisterMessage(InitRequest)
    -
    -InitResponse = _reflection.GeneratedProtocolMessageType('InitResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _INITRESPONSE,
    -  '__module__' : 'faas.v1.faas_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.faas.v1.InitResponse)
    -  })
    -_sym_db.RegisterMessage(InitResponse)
    -
    -TriggerRequest = _reflection.GeneratedProtocolMessageType('TriggerRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _TRIGGERREQUEST,
    -  '__module__' : 'faas.v1.faas_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.faas.v1.TriggerRequest)
    -  })
    -_sym_db.RegisterMessage(TriggerRequest)
    -
    -HttpTriggerContext = _reflection.GeneratedProtocolMessageType('HttpTriggerContext', (_message.Message,), {
    -
    -  'HeadersEntry' : _reflection.GeneratedProtocolMessageType('HeadersEntry', (_message.Message,), {
    -    'DESCRIPTOR' : _HTTPTRIGGERCONTEXT_HEADERSENTRY,
    -    '__module__' : 'faas.v1.faas_pb2'
    -    # @@protoc_insertion_point(class_scope:nitric.faas.v1.HttpTriggerContext.HeadersEntry)
    -    })
    -  ,
    -
    -  'QueryParamsEntry' : _reflection.GeneratedProtocolMessageType('QueryParamsEntry', (_message.Message,), {
    -    'DESCRIPTOR' : _HTTPTRIGGERCONTEXT_QUERYPARAMSENTRY,
    -    '__module__' : 'faas.v1.faas_pb2'
    -    # @@protoc_insertion_point(class_scope:nitric.faas.v1.HttpTriggerContext.QueryParamsEntry)
    -    })
    -  ,
    -  'DESCRIPTOR' : _HTTPTRIGGERCONTEXT,
    -  '__module__' : 'faas.v1.faas_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.faas.v1.HttpTriggerContext)
    -  })
    -_sym_db.RegisterMessage(HttpTriggerContext)
    -_sym_db.RegisterMessage(HttpTriggerContext.HeadersEntry)
    -_sym_db.RegisterMessage(HttpTriggerContext.QueryParamsEntry)
    -
    -TopicTriggerContext = _reflection.GeneratedProtocolMessageType('TopicTriggerContext', (_message.Message,), {
    -  'DESCRIPTOR' : _TOPICTRIGGERCONTEXT,
    -  '__module__' : 'faas.v1.faas_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.faas.v1.TopicTriggerContext)
    -  })
    -_sym_db.RegisterMessage(TopicTriggerContext)
    -
    -TriggerResponse = _reflection.GeneratedProtocolMessageType('TriggerResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _TRIGGERRESPONSE,
    -  '__module__' : 'faas.v1.faas_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.faas.v1.TriggerResponse)
    -  })
    -_sym_db.RegisterMessage(TriggerResponse)
    -
    -HttpResponseContext = _reflection.GeneratedProtocolMessageType('HttpResponseContext', (_message.Message,), {
    -
    -  'HeadersEntry' : _reflection.GeneratedProtocolMessageType('HeadersEntry', (_message.Message,), {
    -    'DESCRIPTOR' : _HTTPRESPONSECONTEXT_HEADERSENTRY,
    -    '__module__' : 'faas.v1.faas_pb2'
    -    # @@protoc_insertion_point(class_scope:nitric.faas.v1.HttpResponseContext.HeadersEntry)
    -    })
    -  ,
    -  'DESCRIPTOR' : _HTTPRESPONSECONTEXT,
    -  '__module__' : 'faas.v1.faas_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.faas.v1.HttpResponseContext)
    -  })
    -_sym_db.RegisterMessage(HttpResponseContext)
    -_sym_db.RegisterMessage(HttpResponseContext.HeadersEntry)
    -
    -TopicResponseContext = _reflection.GeneratedProtocolMessageType('TopicResponseContext', (_message.Message,), {
    -  'DESCRIPTOR' : _TOPICRESPONSECONTEXT,
    -  '__module__' : 'faas.v1.faas_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.faas.v1.TopicResponseContext)
    -  })
    -_sym_db.RegisterMessage(TopicResponseContext)
    -
    -
    -DESCRIPTOR._options = None
    -_HTTPTRIGGERCONTEXT_HEADERSENTRY._options = None
    -_HTTPTRIGGERCONTEXT_QUERYPARAMSENTRY._options = None
    -_HTTPRESPONSECONTEXT_HEADERSENTRY._options = None
    -
    -_FAAS = _descriptor.ServiceDescriptor(
    -  name='Faas',
    -  full_name='nitric.faas.v1.Faas',
    -  file=DESCRIPTOR,
    -  index=0,
    -  serialized_options=None,
    -  create_key=_descriptor._internal_create_key,
    -  serialized_start=1229,
    -  serialized_end=1318,
    -  methods=[
    -  _descriptor.MethodDescriptor(
    -    name='TriggerStream',
    -    full_name='nitric.faas.v1.Faas.TriggerStream',
    -    index=0,
    -    containing_service=None,
    -    input_type=_CLIENTMESSAGE,
    -    output_type=_SERVERMESSAGE,
    -    serialized_options=None,
    -    create_key=_descriptor._internal_create_key,
    -  ),
    -])
    -_sym_db.RegisterServiceDescriptor(_FAAS)
    -
    -DESCRIPTOR.services_by_name['Faas'] = _FAAS
    -
    -# @@protoc_insertion_point(module_scope)
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -

    Classes

    -
    -
    -class ClientMessage -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var id
    -
    -

    Field nitric.faas.v1.ClientMessage.id

    -
    -
    var init_request
    -
    -

    Field nitric.faas.v1.ClientMessage.init_request

    -
    -
    var trigger_response
    -
    -

    Field nitric.faas.v1.ClientMessage.trigger_response

    -
    -
    -
    -
    -class HttpResponseContext -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    var HeadersEntry
    -
    -

    A ProtocolMessage

    -
    -
    -

    Instance variables

    -
    -
    var headers
    -
    -

    Field nitric.faas.v1.HttpResponseContext.headers

    -
    -
    var status
    -
    -

    Field nitric.faas.v1.HttpResponseContext.status

    -
    -
    -
    -
    -class HttpTriggerContext -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    var HeadersEntry
    -
    -

    A ProtocolMessage

    -
    -
    var QueryParamsEntry
    -
    -

    A ProtocolMessage

    -
    -
    -

    Instance variables

    -
    -
    var headers
    -
    -

    Field nitric.faas.v1.HttpTriggerContext.headers

    -
    -
    var method
    -
    -

    Field nitric.faas.v1.HttpTriggerContext.method

    -
    -
    var path
    -
    -

    Field nitric.faas.v1.HttpTriggerContext.path

    -
    -
    var query_params
    -
    -

    Field nitric.faas.v1.HttpTriggerContext.query_params

    -
    -
    -
    -
    -class InitRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -
    -
    -class InitResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -
    -
    -class ServerMessage -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var id
    -
    -

    Field nitric.faas.v1.ServerMessage.id

    -
    -
    var init_response
    -
    -

    Field nitric.faas.v1.ServerMessage.init_response

    -
    -
    var trigger_request
    -
    -

    Field nitric.faas.v1.ServerMessage.trigger_request

    -
    -
    -
    -
    -class TopicResponseContext -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var success
    -
    -

    Field nitric.faas.v1.TopicResponseContext.success

    -
    -
    -
    -
    -class TopicTriggerContext -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var topic
    -
    -

    Field nitric.faas.v1.TopicTriggerContext.topic

    -
    -
    -
    -
    -class TriggerRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var data
    -
    -

    Field nitric.faas.v1.TriggerRequest.data

    -
    -
    var http
    -
    -

    Field nitric.faas.v1.TriggerRequest.http

    -
    -
    var mime_type
    -
    -

    Field nitric.faas.v1.TriggerRequest.mime_type

    -
    -
    var topic
    -
    -

    Field nitric.faas.v1.TriggerRequest.topic

    -
    -
    -
    -
    -class TriggerResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var data
    -
    -

    Field nitric.faas.v1.TriggerResponse.data

    -
    -
    var http
    -
    -

    Field nitric.faas.v1.TriggerResponse.http

    -
    -
    var topic
    -
    -

    Field nitric.faas.v1.TriggerResponse.topic

    -
    -
    -
    -
    -
    -
    - -
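As a rough illustration of how the trigger messages documented above fit together: a TriggerResponse carries the response data plus either an http or a topic context (the context oneof), and it is wrapped in a ClientMessage when replying on the Faas TriggerStream RPC. The sketch below uses the pb2-style module path shown for this page and is illustrative only, not part of the generated documentation.

    # Illustrative sketch only -- not part of the generated documentation.
    from nitric.proto.faas.v1 import faas_pb2

    response = faas_pb2.TriggerResponse(
        data=b'{"ok": true}',
        http=faas_pb2.HttpResponseContext(status=200),
    )
    response.http.headers["Content-Type"] = "application/json"  # map<string, string> field

    # Replies on the Faas.TriggerStream RPC are wrapped in a ClientMessage.
    reply = faas_pb2.ClientMessage(id="trigger-1", trigger_response=response)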
- - -
\ No newline at end of file
diff --git a/docs/nitric/proto/kv/v1/kv_pb2.html b/docs/nitric/proto/kv/v1/kv_pb2.html
deleted file mode 100644
index 4bac26b..0000000
--- a/docs/nitric/proto/kv/v1/kv_pb2.html
+++ /dev/null
@@ -1,622 +0,0 @@
- - - - - - -nitric.proto.kv.v1.kv_pb2 API documentation - - - - - - - - - - - -
    -
    -
    -

    Module nitric.proto.kv.v1.kv_pb2

    -
    -
    -

    Generated protocol buffer code.

    -
    - -Expand source code - -
    # -*- coding: utf-8 -*-
    -#
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -# Generated by the protocol buffer compiler.  DO NOT EDIT!
    -# source: kv/v1/kv.proto
    -"""Generated protocol buffer code."""
    -from google.protobuf import descriptor as _descriptor
    -from google.protobuf import message as _message
    -from google.protobuf import reflection as _reflection
    -from google.protobuf import symbol_database as _symbol_database
    -# @@protoc_insertion_point(imports)
    -
    -_sym_db = _symbol_database.Default()
    -
    -
    -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
    -
    -
    -DESCRIPTOR = _descriptor.FileDescriptor(
    -  name='kv/v1/kv.proto',
    -  package='nitric.kv.v1',
    -  syntax='proto3',
    -  serialized_options=b'\n\025io.nitric.proto.kv.v1B\tKeyValuesP\001Z\014nitric/v1;v1\252\002\030Nitric.Proto.KeyValue.v1\312\002\030Nitric\\Proto\\KeyValue\\V1',
    -  create_key=_descriptor._internal_create_key,
    -  serialized_pb=b'\n\x0ekv/v1/kv.proto\x12\x0cnitric.kv.v1\x1a\x1cgoogle/protobuf/struct.proto\"5\n\x12KeyValueGetRequest\x12\x12\n\ncollection\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\"=\n\x13KeyValueGetResponse\x12&\n\x05value\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\"]\n\x12KeyValuePutRequest\x12\x12\n\ncollection\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12&\n\x05value\x18\x03 \x01(\x0b\x32\x17.google.protobuf.Struct\"\x15\n\x13KeyValuePutResponse\"8\n\x15KeyValueDeleteRequest\x12\x12\n\ncollection\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\"\x18\n\x16KeyValueDeleteResponse2\xf7\x01\n\x08KeyValue\x12J\n\x03Get\x12 .nitric.kv.v1.KeyValueGetRequest\x1a!.nitric.kv.v1.KeyValueGetResponse\x12J\n\x03Put\x12 .nitric.kv.v1.KeyValuePutRequest\x1a!.nitric.kv.v1.KeyValuePutResponse\x12S\n\x06\x44\x65lete\x12#.nitric.kv.v1.KeyValueDeleteRequest\x1a$.nitric.kv.v1.KeyValueDeleteResponseBh\n\x15io.nitric.proto.kv.v1B\tKeyValuesP\x01Z\x0cnitric/v1;v1\xaa\x02\x18Nitric.Proto.KeyValue.v1\xca\x02\x18Nitric\\Proto\\KeyValue\\V1b\x06proto3'
    -  ,
    -  dependencies=[google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,])
    -
    -
    -
    -
    -_KEYVALUEGETREQUEST = _descriptor.Descriptor(
    -  name='KeyValueGetRequest',
    -  full_name='nitric.kv.v1.KeyValueGetRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='collection', full_name='nitric.kv.v1.KeyValueGetRequest.collection', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='key', full_name='nitric.kv.v1.KeyValueGetRequest.key', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=62,
    -  serialized_end=115,
    -)
    -
    -
    -_KEYVALUEGETRESPONSE = _descriptor.Descriptor(
    -  name='KeyValueGetResponse',
    -  full_name='nitric.kv.v1.KeyValueGetResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='value', full_name='nitric.kv.v1.KeyValueGetResponse.value', index=0,
    -      number=1, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=117,
    -  serialized_end=178,
    -)
    -
    -
    -_KEYVALUEPUTREQUEST = _descriptor.Descriptor(
    -  name='KeyValuePutRequest',
    -  full_name='nitric.kv.v1.KeyValuePutRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='collection', full_name='nitric.kv.v1.KeyValuePutRequest.collection', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='key', full_name='nitric.kv.v1.KeyValuePutRequest.key', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='value', full_name='nitric.kv.v1.KeyValuePutRequest.value', index=2,
    -      number=3, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=180,
    -  serialized_end=273,
    -)
    -
    -
    -_KEYVALUEPUTRESPONSE = _descriptor.Descriptor(
    -  name='KeyValuePutResponse',
    -  full_name='nitric.kv.v1.KeyValuePutResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=275,
    -  serialized_end=296,
    -)
    -
    -
    -_KEYVALUEDELETEREQUEST = _descriptor.Descriptor(
    -  name='KeyValueDeleteRequest',
    -  full_name='nitric.kv.v1.KeyValueDeleteRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='collection', full_name='nitric.kv.v1.KeyValueDeleteRequest.collection', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='key', full_name='nitric.kv.v1.KeyValueDeleteRequest.key', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=298,
    -  serialized_end=354,
    -)
    -
    -
    -_KEYVALUEDELETERESPONSE = _descriptor.Descriptor(
    -  name='KeyValueDeleteResponse',
    -  full_name='nitric.kv.v1.KeyValueDeleteResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=356,
    -  serialized_end=380,
    -)
    -
    -_KEYVALUEGETRESPONSE.fields_by_name['value'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
    -_KEYVALUEPUTREQUEST.fields_by_name['value'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
    -DESCRIPTOR.message_types_by_name['KeyValueGetRequest'] = _KEYVALUEGETREQUEST
    -DESCRIPTOR.message_types_by_name['KeyValueGetResponse'] = _KEYVALUEGETRESPONSE
    -DESCRIPTOR.message_types_by_name['KeyValuePutRequest'] = _KEYVALUEPUTREQUEST
    -DESCRIPTOR.message_types_by_name['KeyValuePutResponse'] = _KEYVALUEPUTRESPONSE
    -DESCRIPTOR.message_types_by_name['KeyValueDeleteRequest'] = _KEYVALUEDELETEREQUEST
    -DESCRIPTOR.message_types_by_name['KeyValueDeleteResponse'] = _KEYVALUEDELETERESPONSE
    -_sym_db.RegisterFileDescriptor(DESCRIPTOR)
    -
    -KeyValueGetRequest = _reflection.GeneratedProtocolMessageType('KeyValueGetRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _KEYVALUEGETREQUEST,
    -  '__module__' : 'kv.v1.kv_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.kv.v1.KeyValueGetRequest)
    -  })
    -_sym_db.RegisterMessage(KeyValueGetRequest)
    -
    -KeyValueGetResponse = _reflection.GeneratedProtocolMessageType('KeyValueGetResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _KEYVALUEGETRESPONSE,
    -  '__module__' : 'kv.v1.kv_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.kv.v1.KeyValueGetResponse)
    -  })
    -_sym_db.RegisterMessage(KeyValueGetResponse)
    -
    -KeyValuePutRequest = _reflection.GeneratedProtocolMessageType('KeyValuePutRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _KEYVALUEPUTREQUEST,
    -  '__module__' : 'kv.v1.kv_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.kv.v1.KeyValuePutRequest)
    -  })
    -_sym_db.RegisterMessage(KeyValuePutRequest)
    -
    -KeyValuePutResponse = _reflection.GeneratedProtocolMessageType('KeyValuePutResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _KEYVALUEPUTRESPONSE,
    -  '__module__' : 'kv.v1.kv_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.kv.v1.KeyValuePutResponse)
    -  })
    -_sym_db.RegisterMessage(KeyValuePutResponse)
    -
    -KeyValueDeleteRequest = _reflection.GeneratedProtocolMessageType('KeyValueDeleteRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _KEYVALUEDELETEREQUEST,
    -  '__module__' : 'kv.v1.kv_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.kv.v1.KeyValueDeleteRequest)
    -  })
    -_sym_db.RegisterMessage(KeyValueDeleteRequest)
    -
    -KeyValueDeleteResponse = _reflection.GeneratedProtocolMessageType('KeyValueDeleteResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _KEYVALUEDELETERESPONSE,
    -  '__module__' : 'kv.v1.kv_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.kv.v1.KeyValueDeleteResponse)
    -  })
    -_sym_db.RegisterMessage(KeyValueDeleteResponse)
    -
    -
    -DESCRIPTOR._options = None
    -
    -_KEYVALUE = _descriptor.ServiceDescriptor(
    -  name='KeyValue',
    -  full_name='nitric.kv.v1.KeyValue',
    -  file=DESCRIPTOR,
    -  index=0,
    -  serialized_options=None,
    -  create_key=_descriptor._internal_create_key,
    -  serialized_start=383,
    -  serialized_end=630,
    -  methods=[
    -  _descriptor.MethodDescriptor(
    -    name='Get',
    -    full_name='nitric.kv.v1.KeyValue.Get',
    -    index=0,
    -    containing_service=None,
    -    input_type=_KEYVALUEGETREQUEST,
    -    output_type=_KEYVALUEGETRESPONSE,
    -    serialized_options=None,
    -    create_key=_descriptor._internal_create_key,
    -  ),
    -  _descriptor.MethodDescriptor(
    -    name='Put',
    -    full_name='nitric.kv.v1.KeyValue.Put',
    -    index=1,
    -    containing_service=None,
    -    input_type=_KEYVALUEPUTREQUEST,
    -    output_type=_KEYVALUEPUTRESPONSE,
    -    serialized_options=None,
    -    create_key=_descriptor._internal_create_key,
    -  ),
    -  _descriptor.MethodDescriptor(
    -    name='Delete',
    -    full_name='nitric.kv.v1.KeyValue.Delete',
    -    index=2,
    -    containing_service=None,
    -    input_type=_KEYVALUEDELETEREQUEST,
    -    output_type=_KEYVALUEDELETERESPONSE,
    -    serialized_options=None,
    -    create_key=_descriptor._internal_create_key,
    -  ),
    -])
    -_sym_db.RegisterServiceDescriptor(_KEYVALUE)
    -
    -DESCRIPTOR.services_by_name['KeyValue'] = _KEYVALUE
    -
    -# @@protoc_insertion_point(module_scope)
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -

    Classes

    -
    -
    -class KeyValueDeleteRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var collection
    -
    -

    Field nitric.kv.v1.KeyValueDeleteRequest.collection

    -
    -
    var key
    -
    -

    Field nitric.kv.v1.KeyValueDeleteRequest.key

    -
    -
    -
    -
    -class KeyValueDeleteResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -
    -
    -class KeyValueGetRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var collection
    -
    -

    Field nitric.kv.v1.KeyValueGetRequest.collection

    -
    -
    var key
    -
    -

    Field nitric.kv.v1.KeyValueGetRequest.key

    -
    -
    -
    -
    -class KeyValueGetResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var value
    -
    -

    Field nitric.kv.v1.KeyValueGetResponse.value

    -
    -
    -
    -
    -class KeyValuePutRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var collection
    -
    -

    Field nitric.kv.v1.KeyValuePutRequest.collection

    -
    -
    var key
    -
    -

    Field nitric.kv.v1.KeyValuePutRequest.key

    -
    -
    var value
    -
    -

    Field nitric.kv.v1.KeyValuePutRequest.value

    -
    -
    -
    -
    -class KeyValuePutResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -
    -
    -
    -
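Editor's note for context: the classes removed above were plain protoc-generated protobuf messages, so callers built them field by field and serialized them with the standard protobuf API. The following is a minimal, hypothetical sketch of that style (the collection and key names are invented, and KeyValuePutRequest is assumed to come from the old generated module documented above); it is not code from the SDK itself.

    from google.protobuf.struct_pb2 import Struct

    # KeyValuePutRequest: the removed protoc-generated message class shown above.
    request = KeyValuePutRequest()
    request.collection = "customers"      # example collection name
    request.key = "customer-1001"         # example key
    value = Struct()
    value.update({"name": "Jane", "active": True})   # Struct behaves like a dict
    request.value.CopyFrom(value)

    wire_bytes = request.SerializeToString()         # standard protobuf serialization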
    - -
\ No newline at end of file
diff --git a/docs/nitric/proto/nitric/event/index.html b/docs/nitric/proto/nitric/event/index.html
deleted file mode 100644
index 29bdace..0000000
--- a/docs/nitric/proto/nitric/event/index.html
+++ /dev/null
@@ -1,88 +0,0 @@
-nitric.proto.nitric.event API documentation
    -
    -
    -

    Module nitric.proto.nitric.event

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -
    -
    -

    Sub-modules

    -
    -
    nitric.proto.nitric.event.v1
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
    - - - \ No newline at end of file diff --git a/docs/nitric/proto/nitric/event/v1/index.html b/docs/nitric/proto/nitric/event/v1/index.html deleted file mode 100644 index 6f896a6..0000000 --- a/docs/nitric/proto/nitric/event/v1/index.html +++ /dev/null @@ -1,661 +0,0 @@ - - - - - - -nitric.proto.nitric.event.v1 API documentation - - - - - - - - - - - -
    -
    -
    -

    Module nitric.proto.nitric.event.v1

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -# Generated by the protocol buffer compiler.  DO NOT EDIT!
    -# sources: event/v1/event.proto
    -# plugin: python-betterproto
    -from dataclasses import dataclass
    -from typing import Dict, List
    -
    -import betterproto
    -from betterproto.grpc.grpclib_server import ServiceBase
    -import grpclib
    -
    -
    -@dataclass(eq=False, repr=False)
    -class EventPublishRequest(betterproto.Message):
    -    """Request to publish an event to a topic"""
    -
    -    # The name of the topic to publish the event to
    -    topic: str = betterproto.string_field(1)
    -    # The event to be published
    -    event: "NitricEvent" = betterproto.message_field(2)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class EventPublishResponse(betterproto.Message):
    -    """Result of publishing an event"""
    -
    -    # The id of the published message When an id was not supplied one should be
    -    # automatically generated
    -    id: str = betterproto.string_field(1)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class TopicListRequest(betterproto.Message):
    -    """Request for the Topic List method"""
    -
    -    pass
    -
    -
    -@dataclass(eq=False, repr=False)
    -class TopicListResponse(betterproto.Message):
    -    """Topic List Response"""
    -
    -    # The list of found topics
    -    topics: List["NitricTopic"] = betterproto.message_field(1)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class NitricTopic(betterproto.Message):
    -    """Represents an event topic"""
    -
    -    # The Nitric name for the topic
    -    name: str = betterproto.string_field(1)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class NitricEvent(betterproto.Message):
    -    """Nitric Event Model"""
    -
    -    # A Unique ID for the Nitric Event
    -    id: str = betterproto.string_field(1)
    -    # A content hint for the events payload
    -    payload_type: str = betterproto.string_field(2)
    -    # The payload of the event
    -    payload: "betterproto_lib_google_protobuf.Struct" = betterproto.message_field(3)
    -
    -
    -class EventStub(betterproto.ServiceStub):
    -    async def publish(self, *, topic: str = "", event: "NitricEvent" = None) -> "EventPublishResponse":
    -
    -        request = EventPublishRequest()
    -        request.topic = topic
    -        if event is not None:
    -            request.event = event
    -
    -        return await self._unary_unary("/nitric.event.v1.Event/Publish", request, EventPublishResponse)
    -
    -
    -class TopicStub(betterproto.ServiceStub):
    -    async def list(self) -> "TopicListResponse":
    -
    -        request = TopicListRequest()
    -
    -        return await self._unary_unary("/nitric.event.v1.Topic/List", request, TopicListResponse)
    -
    -
    -class EventBase(ServiceBase):
    -    async def publish(self, topic: str, event: "NitricEvent") -> "EventPublishResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def __rpc_publish(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "topic": request.topic,
    -            "event": request.event,
    -        }
    -
    -        response = await self.publish(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
    -        return {
    -            "/nitric.event.v1.Event/Publish": grpclib.const.Handler(
    -                self.__rpc_publish,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                EventPublishRequest,
    -                EventPublishResponse,
    -            ),
    -        }
    -
    -
    -class TopicBase(ServiceBase):
    -    async def list(self) -> "TopicListResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def __rpc_list(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {}
    -
    -        response = await self.list(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
    -        return {
    -            "/nitric.event.v1.Topic/List": grpclib.const.Handler(
    -                self.__rpc_list,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                TopicListRequest,
    -                TopicListResponse,
    -            ),
    -        }
    -
    -
    -import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
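Before the generated class reference below, a rough sketch of how these betterproto stubs were driven over a grpclib channel. This is an editor's illustration rather than SDK code: the membrane address, topic name, event id and payload are all assumptions.

    import asyncio

    from grpclib.client import Channel
    from betterproto.lib.google.protobuf import Struct

    async def publish_example():
        # Assumes a local Nitric membrane serving gRPC on 127.0.0.1:50051.
        channel = Channel(host="127.0.0.1", port=50051)
        try:
            events = EventStub(channel)   # EventStub as defined in the module above
            event = NitricEvent(id="evt-1", payload_type="example", payload=Struct())
            response = await events.publish(topic="orders", event=event)
            return response.id            # the server generates an id if none was supplied
        finally:
            channel.close()

    asyncio.run(publish_example())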

    Classes

    -
    -
    -class EventBase -
    -
    -

    Base class for async gRPC servers.

    -
    - -Expand source code - -
    class EventBase(ServiceBase):
    -    async def publish(self, topic: str, event: "NitricEvent") -> "EventPublishResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def __rpc_publish(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "topic": request.topic,
    -            "event": request.event,
    -        }
    -
    -        response = await self.publish(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
    -        return {
    -            "/nitric.event.v1.Event/Publish": grpclib.const.Handler(
    -                self.__rpc_publish,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                EventPublishRequest,
    -                EventPublishResponse,
    -            ),
    -        }
    -
    -

    Ancestors

    -
      -
    • betterproto.grpc.grpclib_server.ServiceBase
    • -
    • abc.ABC
    • -
    -

    Methods

    -
    -
    -async def publish(self, topic: str, event: NitricEvent) ‑> EventPublishResponse -
    -
    -
    -
    - -Expand source code - -
    async def publish(self, topic: str, event: "NitricEvent") -> "EventPublishResponse":
    -    raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -
    -
    -
    -
    -class EventPublishRequest -(topic: str = <object object>, event: NitricEvent = <object object>) -
    -
    -

    Request to publish an event to a topic

    -
    - -Expand source code - -
    class EventPublishRequest(betterproto.Message):
    -    """Request to publish an event to a topic"""
    -
    -    # The name of the topic to publish the event to
    -    topic: str = betterproto.string_field(1)
    -    # The event to be published
    -    event: "NitricEvent" = betterproto.message_field(2)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var eventNitricEvent
    -
    -
    -
    -
    var topic : str
    -
    -
    -
    -
    -
    -
    -class EventPublishResponse -(id: str = <object object>) -
    -
    -

    Result of publishing an event

    -
    - -Expand source code - -
    class EventPublishResponse(betterproto.Message):
    -    """Result of publishing an event"""
    -
    -    # The id of the published message When an id was not supplied one should be
    -    # automatically generated
    -    id: str = betterproto.string_field(1)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var id : str
    -
    -
    -
    -
    -
    -
    -class EventStub -(channel: Channel, *, timeout: Union[float, NoneType] = None, deadline: Union[ForwardRef('Deadline'), NoneType] = None, metadata: Union[Mapping[str, Union[str, bytes]], Collection[Tuple[str, Union[str, bytes]]], NoneType] = None) -
    -
    -

    Base class for async gRPC clients.

    -
    - -Expand source code - -
    class EventStub(betterproto.ServiceStub):
    -    async def publish(self, *, topic: str = "", event: "NitricEvent" = None) -> "EventPublishResponse":
    -
    -        request = EventPublishRequest()
    -        request.topic = topic
    -        if event is not None:
    -            request.event = event
    -
    -        return await self._unary_unary("/nitric.event.v1.Event/Publish", request, EventPublishResponse)
    -
    -

    Ancestors

    -
      -
    • betterproto.grpc.grpclib_client.ServiceStub
    • -
    • abc.ABC
    • -
    -

    Methods

    -
    -
    -async def publish(self, *, topic: str = '', event: NitricEvent = None) ‑> EventPublishResponse -
    -
    -
    -
    - -Expand source code - -
    async def publish(self, *, topic: str = "", event: "NitricEvent" = None) -> "EventPublishResponse":
    -
    -    request = EventPublishRequest()
    -    request.topic = topic
    -    if event is not None:
    -        request.event = event
    -
    -    return await self._unary_unary("/nitric.event.v1.Event/Publish", request, EventPublishResponse)
    -
    -
    -
    -
    -
    -class NitricEvent -(id: str = <object object>, payload_type: str = <object object>, payload: betterproto_lib_google_protobuf.Struct = <object object>) -
    -
    -

    Nitric Event Model

    -
    - -Expand source code - -
    class NitricEvent(betterproto.Message):
    -    """Nitric Event Model"""
    -
    -    # A Unique ID for the Nitric Event
    -    id: str = betterproto.string_field(1)
    -    # A content hint for the events payload
    -    payload_type: str = betterproto.string_field(2)
    -    # The payload of the event
    -    payload: "betterproto_lib_google_protobuf.Struct" = betterproto.message_field(3)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var id : str
    -
    -
    -
    -
    var payload : betterproto.lib.google.protobuf.Struct
    -
    -
    -
    -
    var payload_type : str
    -
    -
    -
    -
    -
    -
    -class NitricTopic -(name: str = <object object>) -
    -
    -

    Represents an event topic

    -
    - -Expand source code - -
    class NitricTopic(betterproto.Message):
    -    """Represents an event topic"""
    -
    -    # The Nitric name for the topic
    -    name: str = betterproto.string_field(1)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var name : str
    -
    -
    -
    -
    -
    -
    -class TopicBase -
    -
    -

    Base class for async gRPC servers.

    -
    - -Expand source code - -
    class TopicBase(ServiceBase):
    -    async def list(self) -> "TopicListResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def __rpc_list(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {}
    -
    -        response = await self.list(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
    -        return {
    -            "/nitric.event.v1.Topic/List": grpclib.const.Handler(
    -                self.__rpc_list,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                TopicListRequest,
    -                TopicListResponse,
    -            ),
    -        }
    -
    -

    Ancestors

    -
      -
    • betterproto.grpc.grpclib_server.ServiceBase
    • -
    • abc.ABC
    • -
    -

    Methods

    -
    -
    -async def list(self) ‑> TopicListResponse -
    -
    -
    -
    - -Expand source code - -
    async def list(self) -> "TopicListResponse":
    -    raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -
    -
    -
    -
    -class TopicListRequest -
    -
    -

    Request for the Topic List method

    -
    - -Expand source code - -
    class TopicListRequest(betterproto.Message):
    -    """Request for the Topic List method"""
    -
    -    pass
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -
    -
    -class TopicListResponse -(topics: List[ForwardRef('NitricTopic')] = <object object>) -
    -
    -

    Topic List Response

    -
    - -Expand source code - -
    class TopicListResponse(betterproto.Message):
    -    """Topic List Response"""
    -
    -    # The list of found topics
    -    topics: List["NitricTopic"] = betterproto.message_field(1)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var topics : List[NitricTopic]
    -
    -
    -
    -
    -
    -
    -class TopicStub -(channel: Channel, *, timeout: Union[float, NoneType] = None, deadline: Union[ForwardRef('Deadline'), NoneType] = None, metadata: Union[Mapping[str, Union[str, bytes]], Collection[Tuple[str, Union[str, bytes]]], NoneType] = None) -
    -
    -

    Base class for async gRPC clients.

    -
    - -Expand source code - -
    class TopicStub(betterproto.ServiceStub):
    -    async def list(self) -> "TopicListResponse":
    -
    -        request = TopicListRequest()
    -
    -        return await self._unary_unary("/nitric.event.v1.Topic/List", request, TopicListResponse)
    -
    -

    Ancestors

    -
      -
    • betterproto.grpc.grpclib_client.ServiceStub
    • -
    • abc.ABC
    • -
    -

    Methods

    -
    -
    -async def list(self) ‑> TopicListResponse -
    -
    -
    -
    - -Expand source code - -
    async def list(self) -> "TopicListResponse":
    -
    -    request = TopicListRequest()
    -
    -    return await self._unary_unary("/nitric.event.v1.Topic/List", request, TopicListResponse)
    -
    -
    -
    -
    -
    -
    -
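The TopicStub documented above follows the same calling pattern; a brief sketch, again assuming a local membrane address:

    from grpclib.client import Channel

    async def list_topics():
        channel = Channel(host="127.0.0.1", port=50051)   # assumed membrane address
        try:
            topics = TopicStub(channel)
            response = await topics.list()
            return [topic.name for topic in response.topics]
        finally:
            channel.close()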
    - -
\ No newline at end of file
diff --git a/docs/nitric/proto/nitric/faas/index.html b/docs/nitric/proto/nitric/faas/index.html
deleted file mode 100644
index 47bf2b3..0000000
--- a/docs/nitric/proto/nitric/faas/index.html
+++ /dev/null
@@ -1,88 +0,0 @@
-nitric.proto.nitric.faas API documentation
    -
    -
    -

    Module nitric.proto.nitric.faas

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -
    -
    -

    Sub-modules

    -
    -
    nitric.proto.nitric.faas.v1
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
\ No newline at end of file
diff --git a/docs/nitric/proto/nitric/faas/v1/index.html b/docs/nitric/proto/nitric/faas/v1/index.html
deleted file mode 100644
index 0778488..0000000
--- a/docs/nitric/proto/nitric/faas/v1/index.html
+++ /dev/null
@@ -1,777 +0,0 @@
-nitric.proto.nitric.faas.v1 API documentation
    -
    -
    -

    Module nitric.proto.nitric.faas.v1

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -# Generated by the protocol buffer compiler.  DO NOT EDIT!
    -# sources: faas/v1/faas.proto
    -# plugin: python-betterproto
    -from dataclasses import dataclass
    -from typing import AsyncIterable, AsyncIterator, Dict, Iterable, Union
    -
    -import betterproto
    -from betterproto.grpc.grpclib_server import ServiceBase
    -import grpclib
    -
    -
    -@dataclass(eq=False, repr=False)
    -class ClientMessage(betterproto.Message):
    -    """Messages the client is able to send to the server"""
    -
    -    # Client message ID, used to pair requests/responses
    -    id: str = betterproto.string_field(1)
    -    # Client initialisation request
    -    init_request: "InitRequest" = betterproto.message_field(2, group="content")
-    # Client responding with the result of a trigger
    -    trigger_response: "TriggerResponse" = betterproto.message_field(3, group="content")
    -
    -
    -@dataclass(eq=False, repr=False)
    -class ServerMessage(betterproto.Message):
    -    """Messages the server is able to send to the client"""
    -
    -    # Server message ID, used to pair requests/responses
    -    id: str = betterproto.string_field(1)
    -    # Server responding with client configuration details to an InitRequest
    -    init_response: "InitResponse" = betterproto.message_field(2, group="content")
    -    # Server requesting client to process a trigger
    -    trigger_request: "TriggerRequest" = betterproto.message_field(3, group="content")
    -
    -
    -@dataclass(eq=False, repr=False)
    -class InitRequest(betterproto.Message):
    -    """Placeholder message"""
    -
    -    pass
    -
    -
    -@dataclass(eq=False, repr=False)
    -class InitResponse(betterproto.Message):
    -    """Placeholder message"""
    -
    -    pass
    -
    -
    -@dataclass(eq=False, repr=False)
    -class TriggerRequest(betterproto.Message):
    -    """The server has a trigger for the client to handle"""
    -
    -    # The data in the trigger
    -    data: bytes = betterproto.bytes_field(1)
    -    # Should we supply a mime type for the data? Or rely on context?
    -    mime_type: str = betterproto.string_field(2)
    -    http: "HttpTriggerContext" = betterproto.message_field(3, group="context")
    -    topic: "TopicTriggerContext" = betterproto.message_field(4, group="context")
    -
    -
    -@dataclass(eq=False, repr=False)
    -class HttpTriggerContext(betterproto.Message):
    -    # The request method
    -    method: str = betterproto.string_field(1)
    -    # The path of the request
    -    path: str = betterproto.string_field(2)
    -    # The request headers
    -    headers: Dict[str, str] = betterproto.map_field(3, betterproto.TYPE_STRING, betterproto.TYPE_STRING)
    -    # The query params (if parseable by the membrane)
    -    query_params: Dict[str, str] = betterproto.map_field(4, betterproto.TYPE_STRING, betterproto.TYPE_STRING)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class TopicTriggerContext(betterproto.Message):
    -    # The topic the message was published for
    -    topic: str = betterproto.string_field(1)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class TriggerResponse(betterproto.Message):
    -    """The worker has successfully processed a trigger"""
    -
    -    # The data returned in the response
    -    data: bytes = betterproto.bytes_field(1)
    -    # response to a http request
    -    http: "HttpResponseContext" = betterproto.message_field(10, group="context")
    -    # response to a topic trigger
    -    topic: "TopicResponseContext" = betterproto.message_field(11, group="context")
    -
    -
    -@dataclass(eq=False, repr=False)
    -class HttpResponseContext(betterproto.Message):
    -    """
    -    Specific HttpResponse message Note this does not have to be handled by the
    -    User at all but they will have the option of control If they choose...
    -    """
    -
    -    # The request headers...
    -    headers: Dict[str, str] = betterproto.map_field(1, betterproto.TYPE_STRING, betterproto.TYPE_STRING)
    -    # The HTTP status of the request
    -    status: int = betterproto.int32_field(2)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class TopicResponseContext(betterproto.Message):
    -    """
    -    Specific event response message We do not accept responses for events only
    -    whether or not they were successfully processed
    -    """
    -
    -    # Success status of the handled event
    -    success: bool = betterproto.bool_field(1)
    -
    -
    -class FaasStub(betterproto.ServiceStub):
    -    async def trigger_stream(
    -        self,
    -        request_iterator: Union[AsyncIterable["ClientMessage"], Iterable["ClientMessage"]],
    -    ) -> AsyncIterator["ServerMessage"]:
    -
    -        async for response in self._stream_stream(
    -            "/nitric.faas.v1.Faas/TriggerStream",
    -            request_iterator,
    -            ClientMessage,
    -            ServerMessage,
    -        ):
    -            yield response
    -
    -
    -class FaasBase(ServiceBase):
    -    async def trigger_stream(self, request_iterator: AsyncIterator["ClientMessage"]) -> AsyncIterator["ServerMessage"]:
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def __rpc_trigger_stream(self, stream: grpclib.server.Stream) -> None:
    -        request_kwargs = {"request_iterator": stream.__aiter__()}
    -
    -        await self._call_rpc_handler_server_stream(
    -            self.trigger_stream,
    -            stream,
    -            request_kwargs,
    -        )
    -
    -    def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
    -        return {
    -            "/nitric.faas.v1.Faas/TriggerStream": grpclib.const.Handler(
    -                self.__rpc_trigger_stream,
    -                grpclib.const.Cardinality.STREAM_STREAM,
    -                ClientMessage,
    -                ServerMessage,
    -            ),
    -        }
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
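The FaaS service above is a single bidirectional stream: the worker first sends an InitRequest, then answers each TriggerRequest with a TriggerResponse. The sketch below shows that handshake from the worker side. It is illustrative only: the queue-based send loop, the local address and the echo handler are assumptions, and betterproto.which_one_of is used to inspect the content oneof.

    import asyncio

    import betterproto
    from grpclib.client import Channel

    async def run_worker():
        channel = Channel(host="127.0.0.1", port=50051)   # assumed membrane address
        outbox: asyncio.Queue = asyncio.Queue()

        async def outbound():
            # First message initialises the stream; later messages answer triggers.
            yield ClientMessage(id="init", init_request=InitRequest())
            while True:
                yield await outbox.get()

        try:
            faas = FaasStub(channel)
            async for message in faas.trigger_stream(outbound()):
                kind, value = betterproto.which_one_of(message, "content")
                if kind == "trigger_request":
                    # Echo the trigger data back and report the handler as successful.
                    await outbox.put(
                        ClientMessage(
                            id=message.id,
                            trigger_response=TriggerResponse(
                                data=value.data,
                                topic=TopicResponseContext(success=True),
                            ),
                        )
                    )
        finally:
            channel.close()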

    Classes

    -
    -
    -class ClientMessage -(id: str = <object object>, init_request: InitRequest = <object object>, trigger_response: TriggerResponse = <object object>) -
    -
    -

    Messages the client is able to send to the server

    -
    - -Expand source code - -
    class ClientMessage(betterproto.Message):
    -    """Messages the client is able to send to the server"""
    -
    -    # Client message ID, used to pair requests/responses
    -    id: str = betterproto.string_field(1)
    -    # Client initialisation request
    -    init_request: "InitRequest" = betterproto.message_field(2, group="content")
-    # Client responding with the result of a trigger
    -    trigger_response: "TriggerResponse" = betterproto.message_field(3, group="content")
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var id : str
    -
    -
    -
    -
    var init_requestInitRequest
    -
    -
    -
    -
    var trigger_responseTriggerResponse
    -
    -
    -
    -
    -
    -
    -class FaasBase -
    -
    -

    Base class for async gRPC servers.

    -
    - -Expand source code - -
    class FaasBase(ServiceBase):
    -    async def trigger_stream(self, request_iterator: AsyncIterator["ClientMessage"]) -> AsyncIterator["ServerMessage"]:
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def __rpc_trigger_stream(self, stream: grpclib.server.Stream) -> None:
    -        request_kwargs = {"request_iterator": stream.__aiter__()}
    -
    -        await self._call_rpc_handler_server_stream(
    -            self.trigger_stream,
    -            stream,
    -            request_kwargs,
    -        )
    -
    -    def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
    -        return {
    -            "/nitric.faas.v1.Faas/TriggerStream": grpclib.const.Handler(
    -                self.__rpc_trigger_stream,
    -                grpclib.const.Cardinality.STREAM_STREAM,
    -                ClientMessage,
    -                ServerMessage,
    -            ),
    -        }
    -
    -

    Ancestors

    -
      -
    • betterproto.grpc.grpclib_server.ServiceBase
    • -
    • abc.ABC
    • -
    -

    Methods

    -
    -
    -async def trigger_stream(self, request_iterator: AsyncIterator[ForwardRef('ClientMessage')]) ‑> AsyncIterator[ServerMessage] -
    -
    -
    -
    - -Expand source code - -
    async def trigger_stream(self, request_iterator: AsyncIterator["ClientMessage"]) -> AsyncIterator["ServerMessage"]:
    -    raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -
    -
    -
    -
    -class FaasStub -(channel: Channel, *, timeout: Union[float, NoneType] = None, deadline: Union[ForwardRef('Deadline'), NoneType] = None, metadata: Union[Mapping[str, Union[str, bytes]], Collection[Tuple[str, Union[str, bytes]]], NoneType] = None) -
    -
    -

    Base class for async gRPC clients.

    -
    - -Expand source code - -
    class FaasStub(betterproto.ServiceStub):
    -    async def trigger_stream(
    -        self,
    -        request_iterator: Union[AsyncIterable["ClientMessage"], Iterable["ClientMessage"]],
    -    ) -> AsyncIterator["ServerMessage"]:
    -
    -        async for response in self._stream_stream(
    -            "/nitric.faas.v1.Faas/TriggerStream",
    -            request_iterator,
    -            ClientMessage,
    -            ServerMessage,
    -        ):
    -            yield response
    -
    -

    Ancestors

    -
      -
    • betterproto.grpc.grpclib_client.ServiceStub
    • -
    • abc.ABC
    • -
    -

    Methods

    -
    -
    -async def trigger_stream(self, request_iterator: Union[AsyncIterable[ForwardRef('ClientMessage')], Iterable[ForwardRef('ClientMessage')]]) ‑> AsyncIterator[ServerMessage] -
    -
    -
    -
    - -Expand source code - -
    async def trigger_stream(
    -    self,
    -    request_iterator: Union[AsyncIterable["ClientMessage"], Iterable["ClientMessage"]],
    -) -> AsyncIterator["ServerMessage"]:
    -
    -    async for response in self._stream_stream(
    -        "/nitric.faas.v1.Faas/TriggerStream",
    -        request_iterator,
    -        ClientMessage,
    -        ServerMessage,
    -    ):
    -        yield response
    -
    -
    -
    -
    -
    -class HttpResponseContext -(headers: Dict[str, str] = <object object>, status: int = <object object>) -
    -
    -

    Specific HttpResponse message Note this does not have to be handled by the -User at all but they will have the option of control If they choose…

    -
    - -Expand source code - -
    class HttpResponseContext(betterproto.Message):
    -    """
    -    Specific HttpResponse message Note this does not have to be handled by the
    -    User at all but they will have the option of control If they choose...
    -    """
    -
    -    # The request headers...
    -    headers: Dict[str, str] = betterproto.map_field(1, betterproto.TYPE_STRING, betterproto.TYPE_STRING)
    -    # The HTTP status of the request
    -    status: int = betterproto.int32_field(2)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var headers : Dict[str, str]
    -
    -
    -
    -
    var status : int
    -
    -
    -
    -
    -
    -
    -class HttpTriggerContext -(method: str = <object object>, path: str = <object object>, headers: Dict[str, str] = <object object>, query_params: Dict[str, str] = <object object>) -
    -
    -

HttpTriggerContext(method: str = <object object>, path: str = <object object>, headers: Dict[str, str] = <object object>, query_params: Dict[str, str] = <object object>)

    -
    - -Expand source code - -
    class HttpTriggerContext(betterproto.Message):
    -    # The request method
    -    method: str = betterproto.string_field(1)
    -    # The path of the request
    -    path: str = betterproto.string_field(2)
    -    # The request headers
    -    headers: Dict[str, str] = betterproto.map_field(3, betterproto.TYPE_STRING, betterproto.TYPE_STRING)
    -    # The query params (if parseable by the membrane)
    -    query_params: Dict[str, str] = betterproto.map_field(4, betterproto.TYPE_STRING, betterproto.TYPE_STRING)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var headers : Dict[str, str]
    -
    -
    -
    -
    var method : str
    -
    -
    -
    -
    var path : str
    -
    -
    -
    -
    var query_params : Dict[str, str]
    -
    -
    -
    -
    - -
    -class InitRequest -
    -
    -

    Placeholder message

    -
    - -Expand source code - -
    class InitRequest(betterproto.Message):
    -    """Placeholder message"""
    -
    -    pass
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -
    -
    -class InitResponse -
    -
    -

    Placeholder message

    -
    - -Expand source code - -
    class InitResponse(betterproto.Message):
    -    """Placeholder message"""
    -
    -    pass
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -
    -
    -class ServerMessage -(id: str = <object object>, init_response: InitResponse = <object object>, trigger_request: TriggerRequest = <object object>) -
    -
    -

    Messages the server is able to send to the client

    -
    - -Expand source code - -
    class ServerMessage(betterproto.Message):
    -    """Messages the server is able to send to the client"""
    -
    -    # Server message ID, used to pair requests/responses
    -    id: str = betterproto.string_field(1)
    -    # Server responding with client configuration details to an InitRequest
    -    init_response: "InitResponse" = betterproto.message_field(2, group="content")
    -    # Server requesting client to process a trigger
    -    trigger_request: "TriggerRequest" = betterproto.message_field(3, group="content")
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var id : str
    -
    -
    -
    -
    var init_responseInitResponse
    -
    -
    -
    -
    var trigger_requestTriggerRequest
    -
    -
    -
    -
    -
    -
    -class TopicResponseContext -(success: bool = <object object>) -
    -
    -

    Specific event response message We do not accept responses for events only -whether or not they were successfully processed

    -
    - -Expand source code - -
    class TopicResponseContext(betterproto.Message):
    -    """
    -    Specific event response message We do not accept responses for events only
    -    whether or not they were successfully processed
    -    """
    -
    -    # Success status of the handled event
    -    success: bool = betterproto.bool_field(1)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var success : bool
    -
    -
    -
    -
    -
    -
    -class TopicTriggerContext -(topic: str = <object object>) -
    -
    -

TopicTriggerContext(topic: str = <object object>)

    -
    - -Expand source code - -
    class TopicTriggerContext(betterproto.Message):
    -    # The topic the message was published for
    -    topic: str = betterproto.string_field(1)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var topic : str
    -
    -
    -
    -
    - -
    -class TriggerRequest -(data: bytes = <object object>, mime_type: str = <object object>, http: HttpTriggerContext = <object object>, topic: TopicTriggerContext = <object object>) -
    -
    -

    The server has a trigger for the client to handle

    -
    - -Expand source code - -
    class TriggerRequest(betterproto.Message):
    -    """The server has a trigger for the client to handle"""
    -
    -    # The data in the trigger
    -    data: bytes = betterproto.bytes_field(1)
    -    # Should we supply a mime type for the data? Or rely on context?
    -    mime_type: str = betterproto.string_field(2)
    -    http: "HttpTriggerContext" = betterproto.message_field(3, group="context")
    -    topic: "TopicTriggerContext" = betterproto.message_field(4, group="context")
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var data : bytes
    -
    -
    -
    -
    var httpHttpTriggerContext
    -
    -
    -
    -
    var mime_type : str
    -
    -
    -
    -
    var topicTopicTriggerContext
    -
    -
    -
    -
    -
    -
    -class TriggerResponse -(data: bytes = <object object>, http: HttpResponseContext = <object object>, topic: TopicResponseContext = <object object>) -
    -
    -

    The worker has successfully processed a trigger

    -
    - -Expand source code - -
    class TriggerResponse(betterproto.Message):
    -    """The worker has successfully processed a trigger"""
    -
    -    # The data returned in the response
    -    data: bytes = betterproto.bytes_field(1)
    -    # response to a http request
    -    http: "HttpResponseContext" = betterproto.message_field(10, group="context")
    -    # response to a topic trigger
    -    topic: "TopicResponseContext" = betterproto.message_field(11, group="context")
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var data : bytes
    -
    -
    -
    -
    var httpHttpResponseContext
    -
    -
    -
    -
    var topicTopicResponseContext
    -
    -
    -
    -
    -
\ No newline at end of file
diff --git a/docs/nitric/proto/nitric/index.html b/docs/nitric/proto/nitric/index.html
deleted file mode 100644
index 6b92d37..0000000
--- a/docs/nitric/proto/nitric/index.html
+++ /dev/null
@@ -1,108 +0,0 @@
-nitric.proto.nitric API documentation
    -
    -
    -

    Module nitric.proto.nitric

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -
    -
    -

    Sub-modules

    -
    -
    nitric.proto.nitric.event
    -
    -
    -
    -
    nitric.proto.nitric.faas
    -
    -
    -
    -
    nitric.proto.nitric.kv
    -
    -
    -
    -
    nitric.proto.nitric.queue
    -
    -
    -
    -
    nitric.proto.nitric.storage
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
\ No newline at end of file
diff --git a/docs/nitric/proto/nitric/kv/index.html b/docs/nitric/proto/nitric/kv/index.html
deleted file mode 100644
index e5c044e..0000000
--- a/docs/nitric/proto/nitric/kv/index.html
+++ /dev/null
@@ -1,88 +0,0 @@
-nitric.proto.nitric.kv API documentation
    -
    -
    -

    Module nitric.proto.nitric.kv

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -
    -
    -

    Sub-modules

    -
    -
    nitric.proto.nitric.kv.v1
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
\ No newline at end of file
diff --git a/docs/nitric/proto/nitric/kv/v1/index.html b/docs/nitric/proto/nitric/kv/v1/index.html
deleted file mode 100644
index 1cf31ba..0000000
--- a/docs/nitric/proto/nitric/kv/v1/index.html
+++ /dev/null
@@ -1,700 +0,0 @@
-nitric.proto.nitric.kv.v1 API documentation
    -
    -
    -

    Module nitric.proto.nitric.kv.v1

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -# Generated by the protocol buffer compiler.  DO NOT EDIT!
    -# sources: kv/v1/kv.proto
    -# plugin: python-betterproto
    -from dataclasses import dataclass
    -from typing import Dict
    -
    -import betterproto
    -from betterproto.grpc.grpclib_server import ServiceBase
    -import grpclib
    -
    -
    -@dataclass(eq=False, repr=False)
    -class KeyValueGetRequest(betterproto.Message):
    -    # The collection to retrieve the keyValue from
    -    collection: str = betterproto.string_field(1)
    -    # The unique key of the keyValue to retrieve
    -    key: str = betterproto.string_field(2)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class KeyValueGetResponse(betterproto.Message):
    -    # The retrieved value
    -    value: "betterproto_lib_google_protobuf.Struct" = betterproto.message_field(1)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class KeyValuePutRequest(betterproto.Message):
    -    # The collection containing the existing keyValue to be inserted or updated.
    -    collection: str = betterproto.string_field(1)
    -    # The unique key of the keyValue to put
    -    key: str = betterproto.string_field(2)
    -    # A simple JSON object
    -    value: "betterproto_lib_google_protobuf.Struct" = betterproto.message_field(3)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class KeyValuePutResponse(betterproto.Message):
    -    pass
    -
    -
    -@dataclass(eq=False, repr=False)
    -class KeyValueDeleteRequest(betterproto.Message):
    -    # The collection containing the existing keyValue to be deleted
    -    collection: str = betterproto.string_field(1)
    -    # The unique key of the keyValue to delete
    -    key: str = betterproto.string_field(2)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class KeyValueDeleteResponse(betterproto.Message):
    -    pass
    -
    -
    -class KeyValueStub(betterproto.ServiceStub):
    -    async def get(self, *, collection: str = "", key: str = "") -> "KeyValueGetResponse":
    -
    -        request = KeyValueGetRequest()
    -        request.collection = collection
    -        request.key = key
    -
    -        return await self._unary_unary("/nitric.kv.v1.KeyValue/Get", request, KeyValueGetResponse)
    -
    -    async def put(
    -        self,
    -        *,
    -        collection: str = "",
    -        key: str = "",
    -        value: "betterproto_lib_google_protobuf.Struct" = None,
    -    ) -> "KeyValuePutResponse":
    -
    -        request = KeyValuePutRequest()
    -        request.collection = collection
    -        request.key = key
    -        if value is not None:
    -            request.value = value
    -
    -        return await self._unary_unary("/nitric.kv.v1.KeyValue/Put", request, KeyValuePutResponse)
    -
    -    async def delete(self, *, collection: str = "", key: str = "") -> "KeyValueDeleteResponse":
    -
    -        request = KeyValueDeleteRequest()
    -        request.collection = collection
    -        request.key = key
    -
    -        return await self._unary_unary("/nitric.kv.v1.KeyValue/Delete", request, KeyValueDeleteResponse)
    -
    -
    -class KeyValueBase(ServiceBase):
    -    async def get(self, collection: str, key: str) -> "KeyValueGetResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def put(
    -        self, collection: str, key: str, value: "betterproto_lib_google_protobuf.Struct"
    -    ) -> "KeyValuePutResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def delete(self, collection: str, key: str) -> "KeyValueDeleteResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def __rpc_get(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "collection": request.collection,
    -            "key": request.key,
    -        }
    -
    -        response = await self.get(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_put(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "collection": request.collection,
    -            "key": request.key,
    -            "value": request.value,
    -        }
    -
    -        response = await self.put(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_delete(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "collection": request.collection,
    -            "key": request.key,
    -        }
    -
    -        response = await self.delete(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
    -        return {
    -            "/nitric.kv.v1.KeyValue/Get": grpclib.const.Handler(
    -                self.__rpc_get,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                KeyValueGetRequest,
    -                KeyValueGetResponse,
    -            ),
    -            "/nitric.kv.v1.KeyValue/Put": grpclib.const.Handler(
    -                self.__rpc_put,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                KeyValuePutRequest,
    -                KeyValuePutResponse,
    -            ),
    -            "/nitric.kv.v1.KeyValue/Delete": grpclib.const.Handler(
    -                self.__rpc_delete,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                KeyValueDeleteRequest,
    -                KeyValueDeleteResponse,
    -            ),
    -        }
    -
    -
    -import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
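As with the other services, the key-value stub above was called through a grpclib channel. A short, hypothetical round trip (the membrane address, collection, key and stored value are examples only):

    import asyncio

    from grpclib.client import Channel
    from betterproto.lib.google.protobuf import Struct, Value

    async def kv_example():
        channel = Channel(host="127.0.0.1", port=50051)   # assumed membrane address
        try:
            kv = KeyValueStub(channel)
            value = Struct(fields={"count": Value(number_value=1)})
            await kv.put(collection="counters", key="visits", value=value)
            fetched = await kv.get(collection="counters", key="visits")
            print(fetched.value)                           # the stored Struct
            await kv.delete(collection="counters", key="visits")
        finally:
            channel.close()

    asyncio.run(kv_example())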

    Classes

    -
    -
    -class KeyValueBase -
    -
    -

    Base class for async gRPC servers.

    -
    - -Expand source code - -
    class KeyValueBase(ServiceBase):
    -    async def get(self, collection: str, key: str) -> "KeyValueGetResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def put(
    -        self, collection: str, key: str, value: "betterproto_lib_google_protobuf.Struct"
    -    ) -> "KeyValuePutResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def delete(self, collection: str, key: str) -> "KeyValueDeleteResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def __rpc_get(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "collection": request.collection,
    -            "key": request.key,
    -        }
    -
    -        response = await self.get(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_put(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "collection": request.collection,
    -            "key": request.key,
    -            "value": request.value,
    -        }
    -
    -        response = await self.put(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_delete(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "collection": request.collection,
    -            "key": request.key,
    -        }
    -
    -        response = await self.delete(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
    -        return {
    -            "/nitric.kv.v1.KeyValue/Get": grpclib.const.Handler(
    -                self.__rpc_get,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                KeyValueGetRequest,
    -                KeyValueGetResponse,
    -            ),
    -            "/nitric.kv.v1.KeyValue/Put": grpclib.const.Handler(
    -                self.__rpc_put,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                KeyValuePutRequest,
    -                KeyValuePutResponse,
    -            ),
    -            "/nitric.kv.v1.KeyValue/Delete": grpclib.const.Handler(
    -                self.__rpc_delete,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                KeyValueDeleteRequest,
    -                KeyValueDeleteResponse,
    -            ),
    -        }
    -
    -

    Ancestors

    -
      -
    • betterproto.grpc.grpclib_server.ServiceBase
    • -
    • abc.ABC
    • -
    -

    Methods

    -
    -
    -async def delete(self, collection: str, key: str) ‑> KeyValueDeleteResponse -
    -
    -
    -
    - -Expand source code - -
    async def delete(self, collection: str, key: str) -> "KeyValueDeleteResponse":
    -    raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -
    -
    -async def get(self, collection: str, key: str) ‑> KeyValueGetResponse -
    -
    -
    -
    - -Expand source code - -
    async def get(self, collection: str, key: str) -> "KeyValueGetResponse":
    -    raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -
    -
    -async def put(self, collection: str, key: str, value: betterproto_lib_google_protobuf.Struct) ‑> KeyValuePutResponse -
    -
    -
    -
    - -Expand source code - -
    async def put(
    -    self, collection: str, key: str, value: "betterproto_lib_google_protobuf.Struct"
    -) -> "KeyValuePutResponse":
    -    raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -
    -
    -
    -
    -class KeyValueDeleteRequest -(collection: str = <object object>, key: str = <object object>) -
    -
    -

KeyValueDeleteRequest(collection: str = <object object>, key: str = <object object>)

    -
    - -Expand source code - -
    class KeyValueDeleteRequest(betterproto.Message):
    -    # The collection containing the existing keyValue to be deleted
    -    collection: str = betterproto.string_field(1)
    -    # The unique key of the keyValue to delete
    -    key: str = betterproto.string_field(2)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var collection : str
    -
    -
    -
    -
    var key : str
    -
    -
    -
    -
    - -
    -class KeyValueDeleteResponse -
    -
    -

    KeyValueDeleteResponse()

    -
    - -Expand source code - -
    class KeyValueDeleteResponse(betterproto.Message):
    -    pass
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -
    -
    -class KeyValueGetRequest -(collection: str = <object object>, key: str = <object object>) -
    -
    -

KeyValueGetRequest(collection: str = <object object>, key: str = <object object>)

    -
    - -Expand source code - -
    class KeyValueGetRequest(betterproto.Message):
    -    # The collection to retrieve the keyValue from
    -    collection: str = betterproto.string_field(1)
    -    # The unique key of the keyValue to retrieve
    -    key: str = betterproto.string_field(2)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var collection : str
    -
    -
    -
    -
    var key : str
    -
    -
    -
    -
    - -
    -class KeyValueGetResponse -(value: betterproto_lib_google_protobuf.Struct = <object object>) -
    -
    -

KeyValueGetResponse(value: 'betterproto_lib_google_protobuf.Struct' = <object object>)

    -
    - -Expand source code - -
    class KeyValueGetResponse(betterproto.Message):
    -    # The retrieved value
    -    value: "betterproto_lib_google_protobuf.Struct" = betterproto.message_field(1)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var value : betterproto.lib.google.protobuf.Struct
    -
    -
    -
    -
    - -
    -class KeyValuePutRequest -(collection: str = <object object>, key: str = <object object>, value: betterproto_lib_google_protobuf.Struct = <object object>) -
    -
    -

KeyValuePutRequest(collection: str = <object object>, key: str = <object object>, value: 'betterproto_lib_google_protobuf.Struct' = <object object>)

    -
    - -Expand source code - -
    class KeyValuePutRequest(betterproto.Message):
    -    # The collection containing the existing keyValue to be inserted or updated.
    -    collection: str = betterproto.string_field(1)
    -    # The unique key of the keyValue to put
    -    key: str = betterproto.string_field(2)
    -    # A simple JSON object
    -    value: "betterproto_lib_google_protobuf.Struct" = betterproto.message_field(3)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var collection : str
    -
    -
    -
    -
    var key : str
    -
    -
    -
    -
    var value : betterproto.lib.google.protobuf.Struct
    -
    -
    -
    -
    - -
    -class KeyValuePutResponse -
    -
    -

    KeyValuePutResponse()

    -
    - -Expand source code - -
    class KeyValuePutResponse(betterproto.Message):
    -    pass
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -
    -
    -class KeyValueStub -(channel: Channel, *, timeout: Union[float, NoneType] = None, deadline: Union[ForwardRef('Deadline'), NoneType] = None, metadata: Union[Mapping[str, Union[str, bytes]], Collection[Tuple[str, Union[str, bytes]]], NoneType] = None) -
    -
    -

    Base class for async gRPC clients.

    -
    - -Expand source code - -
    class KeyValueStub(betterproto.ServiceStub):
    -    async def get(self, *, collection: str = "", key: str = "") -> "KeyValueGetResponse":
    -
    -        request = KeyValueGetRequest()
    -        request.collection = collection
    -        request.key = key
    -
    -        return await self._unary_unary("/nitric.kv.v1.KeyValue/Get", request, KeyValueGetResponse)
    -
    -    async def put(
    -        self,
    -        *,
    -        collection: str = "",
    -        key: str = "",
    -        value: "betterproto_lib_google_protobuf.Struct" = None,
    -    ) -> "KeyValuePutResponse":
    -
    -        request = KeyValuePutRequest()
    -        request.collection = collection
    -        request.key = key
    -        if value is not None:
    -            request.value = value
    -
    -        return await self._unary_unary("/nitric.kv.v1.KeyValue/Put", request, KeyValuePutResponse)
    -
    -    async def delete(self, *, collection: str = "", key: str = "") -> "KeyValueDeleteResponse":
    -
    -        request = KeyValueDeleteRequest()
    -        request.collection = collection
    -        request.key = key
    -
    -        return await self._unary_unary("/nitric.kv.v1.KeyValue/Delete", request, KeyValueDeleteResponse)
    -
    -

    Ancestors

    -
      -
    • betterproto.grpc.grpclib_client.ServiceStub
    • -
    • abc.ABC
    • -
    -

    Methods

    -
    -
    -async def delete(self, *, collection: str = '', key: str = '') ‑> KeyValueDeleteResponse -
    -
    -
    -
    - -Expand source code - -
    async def delete(self, *, collection: str = "", key: str = "") -> "KeyValueDeleteResponse":
    -
    -    request = KeyValueDeleteRequest()
    -    request.collection = collection
    -    request.key = key
    -
    -    return await self._unary_unary("/nitric.kv.v1.KeyValue/Delete", request, KeyValueDeleteResponse)
    -
    -
    -
    -async def get(self, *, collection: str = '', key: str = '') ‑> KeyValueGetResponse -
    -
    -
    -
    - -Expand source code - -
    async def get(self, *, collection: str = "", key: str = "") -> "KeyValueGetResponse":
    -
    -    request = KeyValueGetRequest()
    -    request.collection = collection
    -    request.key = key
    -
    -    return await self._unary_unary("/nitric.kv.v1.KeyValue/Get", request, KeyValueGetResponse)
    -
    -
    -
    -async def put(self, *, collection: str = '', key: str = '', value: betterproto_lib_google_protobuf.Struct = None) ‑> KeyValuePutResponse -
    -
    -
    -
    - -Expand source code - -
    async def put(
    -    self,
    -    *,
    -    collection: str = "",
    -    key: str = "",
    -    value: "betterproto_lib_google_protobuf.Struct" = None,
    -) -> "KeyValuePutResponse":
    -
    -    request = KeyValuePutRequest()
    -    request.collection = collection
    -    request.key = key
    -    if value is not None:
    -        request.value = value
    -
    -    return await self._unary_unary("/nitric.kv.v1.KeyValue/Put", request, KeyValuePutResponse)
    -
    -
    -
    -
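Putting the get, put and delete calls above together, a minimal client sketch. The nitric.proto.nitric.kv.v1 import path, the localhost:50051 membrane address and the collection/key names are assumptions for illustration only:

import asyncio

from betterproto.lib.google.protobuf import Struct, Value
from grpclib.client import Channel

from nitric.proto.nitric.kv.v1 import KeyValueStub  # assumed module path for this page


async def main() -> None:
    channel = Channel(host="127.0.0.1", port=50051)  # assumed local membrane address
    try:
        kv = KeyValueStub(channel)
        # Store a simple JSON-like value under a key, then read and delete it.
        value = Struct(fields={"name": Value(string_value="example")})
        await kv.put(collection="customers", key="cust-123", value=value)
        response = await kv.get(collection="customers", key="cust-123")
        print(response.value)
        await kv.delete(collection="customers", key="cust-123")
    finally:
        channel.close()


asyncio.run(main())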
\ No newline at end of file
diff --git a/docs/nitric/proto/nitric/queue/index.html b/docs/nitric/proto/nitric/queue/index.html
deleted file mode 100644
index de96adb..0000000
--- a/docs/nitric/proto/nitric/queue/index.html
+++ /dev/null
@@ -1,88 +0,0 @@
-nitric.proto.nitric.queue API documentation
    -
    -
    -

    Module nitric.proto.nitric.queue

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -
    -
    -

    Sub-modules

    -
    -
    nitric.proto.nitric.queue.v1
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
\ No newline at end of file
diff --git a/docs/nitric/proto/nitric/queue/v1/index.html b/docs/nitric/proto/nitric/queue/v1/index.html
deleted file mode 100644
index 00f1003..0000000
--- a/docs/nitric/proto/nitric/queue/v1/index.html
+++ /dev/null
@@ -1,992 +0,0 @@
-nitric.proto.nitric.queue.v1 API documentation
    -
    -
    -

    Module nitric.proto.nitric.queue.v1

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -# Generated by the protocol buffer compiler.  DO NOT EDIT!
    -# sources: queue/v1/queue.proto
    -# plugin: python-betterproto
    -from dataclasses import dataclass
    -from typing import Dict, List, Optional
    -
    -import betterproto
    -from betterproto.grpc.grpclib_server import ServiceBase
    -import grpclib
    -
    -
    -@dataclass(eq=False, repr=False)
    -class QueueSendRequest(betterproto.Message):
    -    """Request to push a single event to a queue"""
    -
    -    # The Nitric name for the queue this will automatically be resolved to the
    -    # provider specific queue identifier.
    -    queue: str = betterproto.string_field(1)
    -    # The task to push to the queue
    -    task: "NitricTask" = betterproto.message_field(2)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class QueueSendResponse(betterproto.Message):
    -    """Result of pushing a single task to a queue"""
    -
    -    pass
    -
    -
    -@dataclass(eq=False, repr=False)
    -class QueueSendBatchRequest(betterproto.Message):
    -    # The Nitric name for the queue this will automatically be resolved to the
    -    # provider specific queue identifier.
    -    queue: str = betterproto.string_field(1)
    -    # Array of tasks to push to the queue
    -    tasks: List["NitricTask"] = betterproto.message_field(2)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class QueueSendBatchResponse(betterproto.Message):
    -    """Response for sending a collection of tasks"""
    -
    -    # A list of tasks that failed to be queued
    -    failed_tasks: List["FailedTask"] = betterproto.message_field(1)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class QueueReceiveRequest(betterproto.Message):
    -    # The nitric name for the queue this will automatically be resolved to the
    -    # provider specific queue identifier.
    -    queue: str = betterproto.string_field(1)
    -    # The max number of items to pop off the queue, may be capped by provider
    -    # specific limitations
    -    depth: int = betterproto.int32_field(2)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class QueueReceiveResponse(betterproto.Message):
    -    # Array of tasks popped off the queue
    -    tasks: List["NitricTask"] = betterproto.message_field(1)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class QueueCompleteRequest(betterproto.Message):
    -    # The nitric name for the queue  this will automatically be resolved to the
    -    # provider specific queue identifier.
    -    queue: str = betterproto.string_field(1)
    -    # Lease id of the task to be completed
    -    lease_id: str = betterproto.string_field(2)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class QueueCompleteResponse(betterproto.Message):
    -    pass
    -
    -
    -@dataclass(eq=False, repr=False)
    -class FailedTask(betterproto.Message):
    -    # The task that failed to be pushed
    -    task: "NitricTask" = betterproto.message_field(1)
    -    # A message describing the failure
    -    message: str = betterproto.string_field(2)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class NitricTask(betterproto.Message):
    -    """A task to be sent or received from a queue."""
    -
    -    # A unique id for the task
    -    id: str = betterproto.string_field(1)
    -    # The lease id unique to the pop request, this must be used to complete,
    -    # extend the lease or release the task.
    -    lease_id: str = betterproto.string_field(2)
    -    # A content hint for the tasks payload
    -    payload_type: str = betterproto.string_field(3)
    -    # The payload of the task
    -    payload: "betterproto_lib_google_protobuf.Struct" = betterproto.message_field(4)
    -
    -
    -class QueueStub(betterproto.ServiceStub):
    -    async def send(self, *, queue: str = "", task: "NitricTask" = None) -> "QueueSendResponse":
    -
    -        request = QueueSendRequest()
    -        request.queue = queue
    -        if task is not None:
    -            request.task = task
    -
    -        return await self._unary_unary("/nitric.queue.v1.Queue/Send", request, QueueSendResponse)
    -
    -    async def send_batch(
    -        self, *, queue: str = "", tasks: Optional[List["NitricTask"]] = None
    -    ) -> "QueueSendBatchResponse":
    -        tasks = tasks or []
    -
    -        request = QueueSendBatchRequest()
    -        request.queue = queue
    -        if tasks is not None:
    -            request.tasks = tasks
    -
    -        return await self._unary_unary("/nitric.queue.v1.Queue/SendBatch", request, QueueSendBatchResponse)
    -
    -    async def receive(self, *, queue: str = "", depth: int = 0) -> "QueueReceiveResponse":
    -
    -        request = QueueReceiveRequest()
    -        request.queue = queue
    -        request.depth = depth
    -
    -        return await self._unary_unary("/nitric.queue.v1.Queue/Receive", request, QueueReceiveResponse)
    -
    -    async def complete(self, *, queue: str = "", lease_id: str = "") -> "QueueCompleteResponse":
    -
    -        request = QueueCompleteRequest()
    -        request.queue = queue
    -        request.lease_id = lease_id
    -
    -        return await self._unary_unary("/nitric.queue.v1.Queue/Complete", request, QueueCompleteResponse)
    -
    -
    -class QueueBase(ServiceBase):
    -    async def send(self, queue: str, task: "NitricTask") -> "QueueSendResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def send_batch(self, queue: str, tasks: Optional[List["NitricTask"]]) -> "QueueSendBatchResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def receive(self, queue: str, depth: int) -> "QueueReceiveResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def complete(self, queue: str, lease_id: str) -> "QueueCompleteResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def __rpc_send(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "queue": request.queue,
    -            "task": request.task,
    -        }
    -
    -        response = await self.send(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_send_batch(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "queue": request.queue,
    -            "tasks": request.tasks,
    -        }
    -
    -        response = await self.send_batch(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_receive(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "queue": request.queue,
    -            "depth": request.depth,
    -        }
    -
    -        response = await self.receive(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_complete(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "queue": request.queue,
    -            "lease_id": request.lease_id,
    -        }
    -
    -        response = await self.complete(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
    -        return {
    -            "/nitric.queue.v1.Queue/Send": grpclib.const.Handler(
    -                self.__rpc_send,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                QueueSendRequest,
    -                QueueSendResponse,
    -            ),
    -            "/nitric.queue.v1.Queue/SendBatch": grpclib.const.Handler(
    -                self.__rpc_send_batch,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                QueueSendBatchRequest,
    -                QueueSendBatchResponse,
    -            ),
    -            "/nitric.queue.v1.Queue/Receive": grpclib.const.Handler(
    -                self.__rpc_receive,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                QueueReceiveRequest,
    -                QueueReceiveResponse,
    -            ),
    -            "/nitric.queue.v1.Queue/Complete": grpclib.const.Handler(
    -                self.__rpc_complete,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                QueueCompleteRequest,
    -                QueueCompleteResponse,
    -            ),
    -        }
    -
    -
    -import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
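Before the class-by-class reference below, a minimal client sketch for this module's Queue service as documented in the listing above. The membrane address, queue name and task fields are illustrative assumptions:

import asyncio

from grpclib.client import Channel

from nitric.proto.nitric.queue.v1 import NitricTask, QueueStub


async def main() -> None:
    channel = Channel(host="127.0.0.1", port=50051)  # assumed local membrane address
    try:
        queues = QueueStub(channel)
        # Push a single task onto the "work" queue.
        await queues.send(queue="work", task=NitricTask(id="task-001", payload_type="example"))
        # Pop up to 5 tasks; each popped task carries a lease_id that must be
        # used to complete it.
        received = await queues.receive(queue="work", depth=5)
        for task in received.tasks:
            await queues.complete(queue="work", lease_id=task.lease_id)
    finally:
        channel.close()


asyncio.run(main())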

    Classes

    -
    -
    -class FailedTask -(task: NitricTask = <object object>, message: str = <object object>) -
    -
    -

    FailedTask(task: 'NitricTask' = , message: str = )

    -
    - -Expand source code - -
    class FailedTask(betterproto.Message):
    -    # The task that failed to be pushed
    -    task: "NitricTask" = betterproto.message_field(1)
    -    # A message describing the failure
    -    message: str = betterproto.string_field(2)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var message : str
    -
    -
    -
    -
var task : NitricTask
    -
    -
    -
    -
    - -
    -class NitricTask -(id: str = <object object>, lease_id: str = <object object>, payload_type: str = <object object>, payload: betterproto_lib_google_protobuf.Struct = <object object>) -
    -
    -

    A task to be sent or received from a queue.

    -
    - -Expand source code - -
    class NitricTask(betterproto.Message):
    -    """A task to be sent or received from a queue."""
    -
    -    # A unique id for the task
    -    id: str = betterproto.string_field(1)
    -    # The lease id unique to the pop request, this must be used to complete,
    -    # extend the lease or release the task.
    -    lease_id: str = betterproto.string_field(2)
    -    # A content hint for the tasks payload
    -    payload_type: str = betterproto.string_field(3)
    -    # The payload of the task
    -    payload: "betterproto_lib_google_protobuf.Struct" = betterproto.message_field(4)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var id : str
    -
    -
    -
    -
    var lease_id : str
    -
    -
    -
    -
    var payload : betterproto.lib.google.protobuf.Struct
    -
    -
    -
    -
    var payload_type : str
    -
    -
    -
    -
    -
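Since these generated messages are betterproto dataclasses, a NitricTask can be constructed directly with keyword arguments; the Struct payload below is an illustrative assumption based on the field types listed above:

from betterproto.lib.google.protobuf import Struct, Value

from nitric.proto.nitric.queue.v1 import NitricTask

# Illustrative task; the payload_type and payload contents are example values.
task = NitricTask(
    id="task-001",
    payload_type="io.example.order-created",
    payload=Struct(fields={"order_id": Value(string_value="order-42")}),
)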
    -
    -class QueueBase -
    -
    -

    Base class for async gRPC servers.

    -
    - -Expand source code - -
    class QueueBase(ServiceBase):
    -    async def send(self, queue: str, task: "NitricTask") -> "QueueSendResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def send_batch(self, queue: str, tasks: Optional[List["NitricTask"]]) -> "QueueSendBatchResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def receive(self, queue: str, depth: int) -> "QueueReceiveResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def complete(self, queue: str, lease_id: str) -> "QueueCompleteResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def __rpc_send(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "queue": request.queue,
    -            "task": request.task,
    -        }
    -
    -        response = await self.send(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_send_batch(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "queue": request.queue,
    -            "tasks": request.tasks,
    -        }
    -
    -        response = await self.send_batch(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_receive(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "queue": request.queue,
    -            "depth": request.depth,
    -        }
    -
    -        response = await self.receive(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_complete(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "queue": request.queue,
    -            "lease_id": request.lease_id,
    -        }
    -
    -        response = await self.complete(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
    -        return {
    -            "/nitric.queue.v1.Queue/Send": grpclib.const.Handler(
    -                self.__rpc_send,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                QueueSendRequest,
    -                QueueSendResponse,
    -            ),
    -            "/nitric.queue.v1.Queue/SendBatch": grpclib.const.Handler(
    -                self.__rpc_send_batch,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                QueueSendBatchRequest,
    -                QueueSendBatchResponse,
    -            ),
    -            "/nitric.queue.v1.Queue/Receive": grpclib.const.Handler(
    -                self.__rpc_receive,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                QueueReceiveRequest,
    -                QueueReceiveResponse,
    -            ),
    -            "/nitric.queue.v1.Queue/Complete": grpclib.const.Handler(
    -                self.__rpc_complete,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                QueueCompleteRequest,
    -                QueueCompleteResponse,
    -            ),
    -        }
    -
    -

    Ancestors

    -
      -
    • betterproto.grpc.grpclib_server.ServiceBase
    • -
    • abc.ABC
    • -
    -

    Methods

    -
    -
    -async def complete(self, queue: str, lease_id: str) ‑> QueueCompleteResponse -
    -
    -
    -
    - -Expand source code - -
    async def complete(self, queue: str, lease_id: str) -> "QueueCompleteResponse":
    -    raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -
    -
    -async def receive(self, queue: str, depth: int) ‑> QueueReceiveResponse -
    -
    -
    -
    - -Expand source code - -
    async def receive(self, queue: str, depth: int) -> "QueueReceiveResponse":
    -    raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -
    -
    -async def send(self, queue: str, task: NitricTask) ‑> QueueSendResponse -
    -
    -
    -
    - -Expand source code - -
    async def send(self, queue: str, task: "NitricTask") -> "QueueSendResponse":
    -    raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -
    -
    -async def send_batch(self, queue: str, tasks: Union[List[ForwardRef('NitricTask')], NoneType]) ‑> QueueSendBatchResponse -
    -
    -
    -
    - -Expand source code - -
    async def send_batch(self, queue: str, tasks: Optional[List["NitricTask"]]) -> "QueueSendBatchResponse":
    -    raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -
    -
    -
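QueueBase is the server-side counterpart: subclass it, override the RPC methods, and register it with a grpclib Server. A hedged, in-memory sketch, not a real provider implementation; methods left un-overridden keep returning UNIMPLEMENTED from the base class:

import asyncio
from typing import Dict, List

from grpclib.server import Server

from nitric.proto.nitric.queue.v1 import (
    NitricTask,
    QueueBase,
    QueueReceiveResponse,
    QueueSendResponse,
)


class InMemoryQueueService(QueueBase):
    """Toy queue service backed by a dict of lists (illustrative only)."""

    def __init__(self) -> None:
        self._queues: Dict[str, List[NitricTask]] = {}

    async def send(self, queue: str, task: "NitricTask") -> "QueueSendResponse":
        self._queues.setdefault(queue, []).append(task)
        return QueueSendResponse()

    async def receive(self, queue: str, depth: int) -> "QueueReceiveResponse":
        tasks = self._queues.get(queue, [])
        popped, self._queues[queue] = tasks[:depth], tasks[depth:]
        return QueueReceiveResponse(tasks=popped)


async def serve() -> None:
    server = Server([InMemoryQueueService()])
    await server.start("127.0.0.1", 50051)  # illustrative bind address
    await server.wait_closed()


asyncio.run(serve())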
    -
    -class QueueCompleteRequest -(queue: str = <object object>, lease_id: str = <object object>) -
    -
    -

    QueueCompleteRequest(queue: str = , lease_id: str = )

    -
    - -Expand source code - -
    class QueueCompleteRequest(betterproto.Message):
    -    # The nitric name for the queue  this will automatically be resolved to the
    -    # provider specific queue identifier.
    -    queue: str = betterproto.string_field(1)
    -    # Lease id of the task to be completed
    -    lease_id: str = betterproto.string_field(2)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var lease_id : str
    -
    -
    -
    -
    var queue : str
    -
    -
    -
    -
    - -
    -class QueueCompleteResponse -
    -
    -

    QueueCompleteResponse()

    -
    - -Expand source code - -
    class QueueCompleteResponse(betterproto.Message):
    -    pass
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -
    -
    -class QueueReceiveRequest -(queue: str = <object object>, depth: int = <object object>) -
    -
    -

    QueueReceiveRequest(queue: str = , depth: int = )

    -
    - -Expand source code - -
    class QueueReceiveRequest(betterproto.Message):
    -    # The nitric name for the queue this will automatically be resolved to the
    -    # provider specific queue identifier.
    -    queue: str = betterproto.string_field(1)
    -    # The max number of items to pop off the queue, may be capped by provider
    -    # specific limitations
    -    depth: int = betterproto.int32_field(2)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var depth : int
    -
    -
    -
    -
    var queue : str
    -
    -
    -
    -
    - -
    -class QueueReceiveResponse -(tasks: List[ForwardRef('NitricTask')] = <object object>) -
    -
    -

    QueueReceiveResponse(tasks: List[ForwardRef('NitricTask')] = )

    -
    - -Expand source code - -
    class QueueReceiveResponse(betterproto.Message):
    -    # Array of tasks popped off the queue
    -    tasks: List["NitricTask"] = betterproto.message_field(1)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var tasks : List[NitricTask]
    -
    -
    -
    -
    - -
    -class QueueSendBatchRequest -(queue: str = <object object>, tasks: List[ForwardRef('NitricTask')] = <object object>) -
    -
    -

    QueueSendBatchRequest(queue: str = , tasks: List[ForwardRef('NitricTask')] = )

    -
    - -Expand source code - -
    class QueueSendBatchRequest(betterproto.Message):
    -    # The Nitric name for the queue this will automatically be resolved to the
    -    # provider specific queue identifier.
    -    queue: str = betterproto.string_field(1)
    -    # Array of tasks to push to the queue
    -    tasks: List["NitricTask"] = betterproto.message_field(2)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var queue : str
    -
    -
    -
    -
    var tasks : List[NitricTask]
    -
    -
    -
    -
    - -
    -class QueueSendBatchResponse -(failed_tasks: List[ForwardRef('FailedTask')] = <object object>) -
    -
    -

    Response for sending a collection of tasks

    -
    - -Expand source code - -
    class QueueSendBatchResponse(betterproto.Message):
    -    """Response for sending a collection of tasks"""
    -
    -    # A list of tasks that failed to be queued
    -    failed_tasks: List["FailedTask"] = betterproto.message_field(1)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var failed_tasks : List[FailedTask]
    -
    -
    -
    -
    -
    -
    -class QueueSendRequest -(queue: str = <object object>, task: NitricTask = <object object>) -
    -
    -

    Request to push a single event to a queue

    -
    - -Expand source code - -
    class QueueSendRequest(betterproto.Message):
    -    """Request to push a single event to a queue"""
    -
    -    # The Nitric name for the queue this will automatically be resolved to the
    -    # provider specific queue identifier.
    -    queue: str = betterproto.string_field(1)
    -    # The task to push to the queue
    -    task: "NitricTask" = betterproto.message_field(2)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var queue : str
    -
    -
    -
    -
var task : NitricTask
    -
    -
    -
    -
    -
    -
    -class QueueSendResponse -
    -
    -

    Result of pushing a single task to a queue

    -
    - -Expand source code - -
    class QueueSendResponse(betterproto.Message):
    -    """Result of pushing a single task to a queue"""
    -
    -    pass
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -
    -
    -class QueueStub -(channel: Channel, *, timeout: Union[float, NoneType] = None, deadline: Union[ForwardRef('Deadline'), NoneType] = None, metadata: Union[Mapping[str, Union[str, bytes]], Collection[Tuple[str, Union[str, bytes]]], NoneType] = None) -
    -
    -

    Base class for async gRPC clients.

    -
    - -Expand source code - -
    class QueueStub(betterproto.ServiceStub):
    -    async def send(self, *, queue: str = "", task: "NitricTask" = None) -> "QueueSendResponse":
    -
    -        request = QueueSendRequest()
    -        request.queue = queue
    -        if task is not None:
    -            request.task = task
    -
    -        return await self._unary_unary("/nitric.queue.v1.Queue/Send", request, QueueSendResponse)
    -
    -    async def send_batch(
    -        self, *, queue: str = "", tasks: Optional[List["NitricTask"]] = None
    -    ) -> "QueueSendBatchResponse":
    -        tasks = tasks or []
    -
    -        request = QueueSendBatchRequest()
    -        request.queue = queue
    -        if tasks is not None:
    -            request.tasks = tasks
    -
    -        return await self._unary_unary("/nitric.queue.v1.Queue/SendBatch", request, QueueSendBatchResponse)
    -
    -    async def receive(self, *, queue: str = "", depth: int = 0) -> "QueueReceiveResponse":
    -
    -        request = QueueReceiveRequest()
    -        request.queue = queue
    -        request.depth = depth
    -
    -        return await self._unary_unary("/nitric.queue.v1.Queue/Receive", request, QueueReceiveResponse)
    -
    -    async def complete(self, *, queue: str = "", lease_id: str = "") -> "QueueCompleteResponse":
    -
    -        request = QueueCompleteRequest()
    -        request.queue = queue
    -        request.lease_id = lease_id
    -
    -        return await self._unary_unary("/nitric.queue.v1.Queue/Complete", request, QueueCompleteResponse)
    -
    -

    Ancestors

    -
      -
    • betterproto.grpc.grpclib_client.ServiceStub
    • -
    • abc.ABC
    • -
    -

    Methods

    -
    -
    -async def complete(self, *, queue: str = '', lease_id: str = '') ‑> QueueCompleteResponse -
    -
    -
    -
    - -Expand source code - -
    async def complete(self, *, queue: str = "", lease_id: str = "") -> "QueueCompleteResponse":
    -
    -    request = QueueCompleteRequest()
    -    request.queue = queue
    -    request.lease_id = lease_id
    -
    -    return await self._unary_unary("/nitric.queue.v1.Queue/Complete", request, QueueCompleteResponse)
    -
    -
    -
    -async def receive(self, *, queue: str = '', depth: int = 0) ‑> QueueReceiveResponse -
    -
    -
    -
    - -Expand source code - -
    async def receive(self, *, queue: str = "", depth: int = 0) -> "QueueReceiveResponse":
    -
    -    request = QueueReceiveRequest()
    -    request.queue = queue
    -    request.depth = depth
    -
    -    return await self._unary_unary("/nitric.queue.v1.Queue/Receive", request, QueueReceiveResponse)
    -
    -
    -
    -async def send(self, *, queue: str = '', task: NitricTask = None) ‑> QueueSendResponse -
    -
    -
    -
    - -Expand source code - -
    async def send(self, *, queue: str = "", task: "NitricTask" = None) -> "QueueSendResponse":
    -
    -    request = QueueSendRequest()
    -    request.queue = queue
    -    if task is not None:
    -        request.task = task
    -
    -    return await self._unary_unary("/nitric.queue.v1.Queue/Send", request, QueueSendResponse)
    -
    -
    -
    -async def send_batch(self, *, queue: str = '', tasks: Union[List[ForwardRef('NitricTask')], NoneType] = None) ‑> QueueSendBatchResponse -
    -
    -
    -
    - -Expand source code - -
    async def send_batch(
    -    self, *, queue: str = "", tasks: Optional[List["NitricTask"]] = None
    -) -> "QueueSendBatchResponse":
    -    tasks = tasks or []
    -
    -    request = QueueSendBatchRequest()
    -    request.queue = queue
    -    if tasks is not None:
    -        request.tasks = tasks
    -
    -    return await self._unary_unary("/nitric.queue.v1.Queue/SendBatch", request, QueueSendBatchResponse)
    -
    -
    -
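As a usage note for send_batch, tasks that could not be queued are reported in QueueSendBatchResponse.failed_tasks rather than raised; a hedged sketch with an illustrative host, queue name and task ids:

import asyncio

from grpclib.client import Channel

from nitric.proto.nitric.queue.v1 import NitricTask, QueueStub


async def send_many() -> None:
    channel = Channel(host="127.0.0.1", port=50051)  # assumed local membrane address
    try:
        queues = QueueStub(channel)
        tasks = [NitricTask(id=f"task-{i}", payload_type="example") for i in range(3)]
        response = await queues.send_batch(queue="work", tasks=tasks)
        # Tasks that could not be queued come back with a reason attached.
        for failed in response.failed_tasks:
            print(f"failed to queue {failed.task.id}: {failed.message}")
    finally:
        channel.close()


asyncio.run(send_many())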
    -
\ No newline at end of file
diff --git a/docs/nitric/proto/nitric/storage/index.html b/docs/nitric/proto/nitric/storage/index.html
deleted file mode 100644
index d936a53..0000000
--- a/docs/nitric/proto/nitric/storage/index.html
+++ /dev/null
@@ -1,88 +0,0 @@
-nitric.proto.nitric.storage API documentation
    -
    -
    -

    Module nitric.proto.nitric.storage

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -
    -
    -

    Sub-modules

    -
    -
    nitric.proto.nitric.storage.v1
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
\ No newline at end of file
diff --git a/docs/nitric/proto/nitric/storage/v1/index.html b/docs/nitric/proto/nitric/storage/v1/index.html
deleted file mode 100644
index c5c6abd..0000000
--- a/docs/nitric/proto/nitric/storage/v1/index.html
+++ /dev/null
@@ -1,698 +0,0 @@
-nitric.proto.nitric.storage.v1 API documentation
    -
    -
    -

    Module nitric.proto.nitric.storage.v1

    -
    -
    -
    - -Expand source code - -
    #
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -# Generated by the protocol buffer compiler.  DO NOT EDIT!
    -# sources: storage/v1/storage.proto
    -# plugin: python-betterproto
    -from dataclasses import dataclass
    -from typing import Dict
    -
    -import betterproto
    -from betterproto.grpc.grpclib_server import ServiceBase
    -import grpclib
    -
    -
    -@dataclass(eq=False, repr=False)
    -class StorageWriteRequest(betterproto.Message):
    -    """Request to put (create/update) a storage item"""
    -
    -    # Nitric name of the bucket to store in  this will be automatically resolved
    -    # to the provider specific bucket identifier.
    -    bucket_name: str = betterproto.string_field(1)
    -    # Key to store the item under
    -    key: str = betterproto.string_field(2)
    -    # bytes array to store
    -    body: bytes = betterproto.bytes_field(3)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class StorageWriteResponse(betterproto.Message):
    -    """Result of putting a storage item"""
    -
    -    pass
    -
    -
    -@dataclass(eq=False, repr=False)
    -class StorageReadRequest(betterproto.Message):
    -    """Request to retrieve a storage item"""
    -
    -    # Nitric name of the bucket to retrieve from  this will be automatically
    -    # resolved to the provider specific bucket identifier.
    -    bucket_name: str = betterproto.string_field(1)
    -    # Key of item to retrieve
    -    key: str = betterproto.string_field(2)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class StorageReadResponse(betterproto.Message):
    -    """Returned storage item"""
    -
    -    # The body bytes of the retrieved storage item
    -    body: bytes = betterproto.bytes_field(1)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class StorageDeleteRequest(betterproto.Message):
    -    """Request to delete a storage item"""
    -
    -    # Name of the bucket to delete from
    -    bucket_name: str = betterproto.string_field(1)
    -    # Key of item to delete
    -    key: str = betterproto.string_field(2)
    -
    -
    -@dataclass(eq=False, repr=False)
    -class StorageDeleteResponse(betterproto.Message):
    -    """Result of deleting a storage item"""
    -
    -    pass
    -
    -
    -class StorageStub(betterproto.ServiceStub):
    -    async def read(self, *, bucket_name: str = "", key: str = "") -> "StorageReadResponse":
    -
    -        request = StorageReadRequest()
    -        request.bucket_name = bucket_name
    -        request.key = key
    -
    -        return await self._unary_unary("/nitric.storage.v1.Storage/Read", request, StorageReadResponse)
    -
    -    async def write(self, *, bucket_name: str = "", key: str = "", body: bytes = b"") -> "StorageWriteResponse":
    -
    -        request = StorageWriteRequest()
    -        request.bucket_name = bucket_name
    -        request.key = key
    -        request.body = body
    -
    -        return await self._unary_unary("/nitric.storage.v1.Storage/Write", request, StorageWriteResponse)
    -
    -    async def delete(self, *, bucket_name: str = "", key: str = "") -> "StorageDeleteResponse":
    -
    -        request = StorageDeleteRequest()
    -        request.bucket_name = bucket_name
    -        request.key = key
    -
    -        return await self._unary_unary("/nitric.storage.v1.Storage/Delete", request, StorageDeleteResponse)
    -
    -
    -class StorageBase(ServiceBase):
    -    async def read(self, bucket_name: str, key: str) -> "StorageReadResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def write(self, bucket_name: str, key: str, body: bytes) -> "StorageWriteResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def delete(self, bucket_name: str, key: str) -> "StorageDeleteResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def __rpc_read(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "bucket_name": request.bucket_name,
    -            "key": request.key,
    -        }
    -
    -        response = await self.read(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_write(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "bucket_name": request.bucket_name,
    -            "key": request.key,
    -            "body": request.body,
    -        }
    -
    -        response = await self.write(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_delete(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "bucket_name": request.bucket_name,
    -            "key": request.key,
    -        }
    -
    -        response = await self.delete(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
    -        return {
    -            "/nitric.storage.v1.Storage/Read": grpclib.const.Handler(
    -                self.__rpc_read,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                StorageReadRequest,
    -                StorageReadResponse,
    -            ),
    -            "/nitric.storage.v1.Storage/Write": grpclib.const.Handler(
    -                self.__rpc_write,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                StorageWriteRequest,
    -                StorageWriteResponse,
    -            ),
    -            "/nitric.storage.v1.Storage/Delete": grpclib.const.Handler(
    -                self.__rpc_delete,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                StorageDeleteRequest,
    -                StorageDeleteResponse,
    -            ),
    -        }
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
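Before the class-by-class reference below, a minimal client sketch for this module's Storage service as documented in the listing above. The membrane address, bucket name, key and body are illustrative assumptions:

import asyncio

from grpclib.client import Channel

from nitric.proto.nitric.storage.v1 import StorageStub


async def main() -> None:
    channel = Channel(host="127.0.0.1", port=50051)  # assumed local membrane address
    try:
        storage = StorageStub(channel)
        # Write an item, read it back, then delete it.
        await storage.write(bucket_name="images", key="photo.png", body=b"example bytes")
        read = await storage.read(bucket_name="images", key="photo.png")
        print(f"read {len(read.body)} bytes")
        await storage.delete(bucket_name="images", key="photo.png")
    finally:
        channel.close()


asyncio.run(main())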

    Classes

    -
    -
    -class StorageBase -
    -
    -

    Base class for async gRPC servers.

    -
    - -Expand source code - -
    class StorageBase(ServiceBase):
    -    async def read(self, bucket_name: str, key: str) -> "StorageReadResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def write(self, bucket_name: str, key: str, body: bytes) -> "StorageWriteResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def delete(self, bucket_name: str, key: str) -> "StorageDeleteResponse":
    -        raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -    async def __rpc_read(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "bucket_name": request.bucket_name,
    -            "key": request.key,
    -        }
    -
    -        response = await self.read(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_write(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "bucket_name": request.bucket_name,
    -            "key": request.key,
    -            "body": request.body,
    -        }
    -
    -        response = await self.write(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    async def __rpc_delete(self, stream: grpclib.server.Stream) -> None:
    -        request = await stream.recv_message()
    -
    -        request_kwargs = {
    -            "bucket_name": request.bucket_name,
    -            "key": request.key,
    -        }
    -
    -        response = await self.delete(**request_kwargs)
    -        await stream.send_message(response)
    -
    -    def __mapping__(self) -> Dict[str, grpclib.const.Handler]:
    -        return {
    -            "/nitric.storage.v1.Storage/Read": grpclib.const.Handler(
    -                self.__rpc_read,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                StorageReadRequest,
    -                StorageReadResponse,
    -            ),
    -            "/nitric.storage.v1.Storage/Write": grpclib.const.Handler(
    -                self.__rpc_write,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                StorageWriteRequest,
    -                StorageWriteResponse,
    -            ),
    -            "/nitric.storage.v1.Storage/Delete": grpclib.const.Handler(
    -                self.__rpc_delete,
    -                grpclib.const.Cardinality.UNARY_UNARY,
    -                StorageDeleteRequest,
    -                StorageDeleteResponse,
    -            ),
    -        }
    -
    -

    Ancestors

    -
      -
    • betterproto.grpc.grpclib_server.ServiceBase
    • -
    • abc.ABC
    • -
    -

    Methods

    -
    -
    -async def delete(self, bucket_name: str, key: str) ‑> StorageDeleteResponse -
    -
    -
    -
    - -Expand source code - -
    async def delete(self, bucket_name: str, key: str) -> "StorageDeleteResponse":
    -    raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -
    -
    -async def read(self, bucket_name: str, key: str) ‑> StorageReadResponse -
    -
    -
    -
    - -Expand source code - -
    async def read(self, bucket_name: str, key: str) -> "StorageReadResponse":
    -    raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -
    -
    -async def write(self, bucket_name: str, key: str, body: bytes) ‑> StorageWriteResponse -
    -
    -
    -
    - -Expand source code - -
    async def write(self, bucket_name: str, key: str, body: bytes) -> "StorageWriteResponse":
    -    raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED)
    -
    -
    -
    -
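For the server side, StorageBase can be subclassed in the same way as the other service bases. A hedged, dict-backed sketch, illustrative only; a real provider would delegate to an actual bucket service:

from typing import Dict, Tuple

from nitric.proto.nitric.storage.v1 import (
    StorageBase,
    StorageDeleteResponse,
    StorageReadResponse,
    StorageWriteResponse,
)


class InMemoryStorageService(StorageBase):
    """Toy storage service keyed by (bucket_name, key) pairs (illustrative only)."""

    def __init__(self) -> None:
        self._items: Dict[Tuple[str, str], bytes] = {}

    async def write(self, bucket_name: str, key: str, body: bytes) -> "StorageWriteResponse":
        self._items[(bucket_name, key)] = body
        return StorageWriteResponse()

    async def read(self, bucket_name: str, key: str) -> "StorageReadResponse":
        return StorageReadResponse(body=self._items.get((bucket_name, key), b""))

    async def delete(self, bucket_name: str, key: str) -> "StorageDeleteResponse":
        self._items.pop((bucket_name, key), None)
        return StorageDeleteResponse()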
    -
    -class StorageDeleteRequest -(bucket_name: str = <object object>, key: str = <object object>) -
    -
    -

    Request to delete a storage item

    -
    - -Expand source code - -
    class StorageDeleteRequest(betterproto.Message):
    -    """Request to delete a storage item"""
    -
    -    # Name of the bucket to delete from
    -    bucket_name: str = betterproto.string_field(1)
    -    # Key of item to delete
    -    key: str = betterproto.string_field(2)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var bucket_name : str
    -
    -
    -
    -
    var key : str
    -
    -
    -
    -
    -
    -
    -class StorageDeleteResponse -
    -
    -

    Result of deleting a storage item

    -
    - -Expand source code - -
    class StorageDeleteResponse(betterproto.Message):
    -    """Result of deleting a storage item"""
    -
    -    pass
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -
    -
    -class StorageReadRequest -(bucket_name: str = <object object>, key: str = <object object>) -
    -
    -

    Request to retrieve a storage item

    -
    - -Expand source code - -
    class StorageReadRequest(betterproto.Message):
    -    """Request to retrieve a storage item"""
    -
    -    # Nitric name of the bucket to retrieve from  this will be automatically
    -    # resolved to the provider specific bucket identifier.
    -    bucket_name: str = betterproto.string_field(1)
    -    # Key of item to retrieve
    -    key: str = betterproto.string_field(2)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var bucket_name : str
    -
    -
    -
    -
    var key : str
    -
    -
    -
    -
    -
    -
    -class StorageReadResponse -(body: bytes = <object object>) -
    -
    -

    Returned storage item

    -
    - -Expand source code - -
    class StorageReadResponse(betterproto.Message):
    -    """Returned storage item"""
    -
    -    # The body bytes of the retrieved storage item
    -    body: bytes = betterproto.bytes_field(1)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var body : bytes
    -
    -
    -
    -
    -
    -
    -class StorageStub -(channel: Channel, *, timeout: Union[float, NoneType] = None, deadline: Union[ForwardRef('Deadline'), NoneType] = None, metadata: Union[Mapping[str, Union[str, bytes]], Collection[Tuple[str, Union[str, bytes]]], NoneType] = None) -
    -
    -

    Base class for async gRPC clients.

    -
    - -Expand source code - -
    class StorageStub(betterproto.ServiceStub):
    -    async def read(self, *, bucket_name: str = "", key: str = "") -> "StorageReadResponse":
    -
    -        request = StorageReadRequest()
    -        request.bucket_name = bucket_name
    -        request.key = key
    -
    -        return await self._unary_unary("/nitric.storage.v1.Storage/Read", request, StorageReadResponse)
    -
    -    async def write(self, *, bucket_name: str = "", key: str = "", body: bytes = b"") -> "StorageWriteResponse":
    -
    -        request = StorageWriteRequest()
    -        request.bucket_name = bucket_name
    -        request.key = key
    -        request.body = body
    -
    -        return await self._unary_unary("/nitric.storage.v1.Storage/Write", request, StorageWriteResponse)
    -
    -    async def delete(self, *, bucket_name: str = "", key: str = "") -> "StorageDeleteResponse":
    -
    -        request = StorageDeleteRequest()
    -        request.bucket_name = bucket_name
    -        request.key = key
    -
    -        return await self._unary_unary("/nitric.storage.v1.Storage/Delete", request, StorageDeleteResponse)
    -
    -

    Ancestors

    -
      -
    • betterproto.grpc.grpclib_client.ServiceStub
    • -
    • abc.ABC
    • -
    -

    Methods

    -
    -
    -async def delete(self, *, bucket_name: str = '', key: str = '') ‑> StorageDeleteResponse -
    -
    -
    -
    - -Expand source code - -
    async def delete(self, *, bucket_name: str = "", key: str = "") -> "StorageDeleteResponse":
    -
    -    request = StorageDeleteRequest()
    -    request.bucket_name = bucket_name
    -    request.key = key
    -
    -    return await self._unary_unary("/nitric.storage.v1.Storage/Delete", request, StorageDeleteResponse)
    -
    -
    -
    -async def read(self, *, bucket_name: str = '', key: str = '') ‑> StorageReadResponse -
    -
    -
    -
    - -Expand source code - -
    async def read(self, *, bucket_name: str = "", key: str = "") -> "StorageReadResponse":
    -
    -    request = StorageReadRequest()
    -    request.bucket_name = bucket_name
    -    request.key = key
    -
    -    return await self._unary_unary("/nitric.storage.v1.Storage/Read", request, StorageReadResponse)
    -
    -
    -
    -async def write(self, *, bucket_name: str = '', key: str = '', body: bytes = b'') ‑> StorageWriteResponse -
    -
    -
    -
    - -Expand source code - -
    async def write(self, *, bucket_name: str = "", key: str = "", body: bytes = b"") -> "StorageWriteResponse":
    -
    -    request = StorageWriteRequest()
    -    request.bucket_name = bucket_name
    -    request.key = key
    -    request.body = body
    -
    -    return await self._unary_unary("/nitric.storage.v1.Storage/Write", request, StorageWriteResponse)
    -
    -
    -
    -
    -
    -class StorageWriteRequest -(bucket_name: str = <object object>, key: str = <object object>, body: bytes = <object object>) -
    -
    -

    Request to put (create/update) a storage item

    -
    - -Expand source code - -
    class StorageWriteRequest(betterproto.Message):
    -    """Request to put (create/update) a storage item"""
    -
    -    # Nitric name of the bucket to store in  this will be automatically resolved
    -    # to the provider specific bucket identifier.
    -    bucket_name: str = betterproto.string_field(1)
    -    # Key to store the item under
    -    key: str = betterproto.string_field(2)
    -    # bytes array to store
    -    body: bytes = betterproto.bytes_field(3)
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -

    Class variables

    -
    -
    var body : bytes
    -
    -
    -
    -
    var bucket_name : str
    -
    -
    -
    -
    var key : str
    -
    -
    -
    -
    -
    -
    -class StorageWriteResponse -
    -
    -

    Result of putting a storage item

    -
    - -Expand source code - -
    class StorageWriteResponse(betterproto.Message):
    -    """Result of putting a storage item"""
    -
    -    pass
    -
    -

    Ancestors

    -
      -
    • betterproto.Message
    • -
    • abc.ABC
    • -
    -
    -
    -
    -
    - -
\ No newline at end of file
diff --git a/docs/nitric/proto/queue/v1/queue_pb2.html b/docs/nitric/proto/queue/v1/queue_pb2.html
deleted file mode 100644
index 85725ae..0000000
--- a/docs/nitric/proto/queue/v1/queue_pb2.html
+++ /dev/null
@@ -1,976 +0,0 @@
-nitric.proto.queue.v1.queue_pb2 API documentation
    -
    -
    -

    Module nitric.proto.queue.v1.queue_pb2

    -
    -
    -

    Generated protocol buffer code.

    -
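Ahead of the full generated listing, a small hedged sketch of building a request with these classic protobuf classes; the field names follow the descriptors below, while the queue name and payload contents are illustrative:

from google.protobuf.struct_pb2 import Struct

from nitric.proto.queue.v1 import queue_pb2

# Build a JSON-like payload using the standard protobuf Struct type.
payload = Struct()
payload.update({"order_id": "order-42"})

request = queue_pb2.QueueSendRequest(
    queue="work",
    task=queue_pb2.NitricTask(
        id="task-001",
        payload_type="io.example.order-created",
        payload=payload,
    ),
)
print(request)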
    - -Expand source code - -
    # -*- coding: utf-8 -*-
    -#
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -# Generated by the protocol buffer compiler.  DO NOT EDIT!
    -# source: queue/v1/queue.proto
    -"""Generated protocol buffer code."""
    -from google.protobuf import descriptor as _descriptor
    -from google.protobuf import message as _message
    -from google.protobuf import reflection as _reflection
    -from google.protobuf import symbol_database as _symbol_database
    -# @@protoc_insertion_point(imports)
    -
    -_sym_db = _symbol_database.Default()
    -
    -
    -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
    -
    -
    -DESCRIPTOR = _descriptor.FileDescriptor(
    -  name='queue/v1/queue.proto',
    -  package='nitric.queue.v1',
    -  syntax='proto3',
    -  serialized_options=b'\n\030io.nitric.proto.queue.v1B\006QueuesP\001Z\014nitric/v1;v1\252\002\025Nitric.Proto.Queue.v1\312\002\025Nitric\\Proto\\Queue\\V1',
    -  create_key=_descriptor._internal_create_key,
    -  serialized_pb=b'\n\x14queue/v1/queue.proto\x12\x0fnitric.queue.v1\x1a\x1cgoogle/protobuf/struct.proto\"L\n\x10QueueSendRequest\x12\r\n\x05queue\x18\x01 \x01(\t\x12)\n\x04task\x18\x02 \x01(\x0b\x32\x1b.nitric.queue.v1.NitricTask\"\x13\n\x11QueueSendResponse\"R\n\x15QueueSendBatchRequest\x12\r\n\x05queue\x18\x01 \x01(\t\x12*\n\x05tasks\x18\x02 \x03(\x0b\x32\x1b.nitric.queue.v1.NitricTask\"J\n\x16QueueSendBatchResponse\x12\x30\n\x0b\x66\x61iledTasks\x18\x01 \x03(\x0b\x32\x1b.nitric.queue.v1.FailedTask\"3\n\x13QueueReceiveRequest\x12\r\n\x05queue\x18\x01 \x01(\t\x12\r\n\x05\x64\x65pth\x18\x02 \x01(\x05\"B\n\x14QueueReceiveResponse\x12*\n\x05tasks\x18\x01 \x03(\x0b\x32\x1b.nitric.queue.v1.NitricTask\"7\n\x14QueueCompleteRequest\x12\r\n\x05queue\x18\x01 \x01(\t\x12\x10\n\x08lease_id\x18\x02 \x01(\t\"\x17\n\x15QueueCompleteResponse\"H\n\nFailedTask\x12)\n\x04task\x18\x01 \x01(\x0b\x32\x1b.nitric.queue.v1.NitricTask\x12\x0f\n\x07message\x18\x02 \x01(\t\"j\n\nNitricTask\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08lease_id\x18\x02 \x01(\t\x12\x14\n\x0cpayload_type\x18\x03 \x01(\t\x12(\n\x07payload\x18\x04 \x01(\x0b\x32\x17.google.protobuf.Struct2\xe7\x02\n\x05Queue\x12M\n\x04Send\x12!.nitric.queue.v1.QueueSendRequest\x1a\".nitric.queue.v1.QueueSendResponse\x12\\\n\tSendBatch\x12&.nitric.queue.v1.QueueSendBatchRequest\x1a\'.nitric.queue.v1.QueueSendBatchResponse\x12V\n\x07Receive\x12$.nitric.queue.v1.QueueReceiveRequest\x1a%.nitric.queue.v1.QueueReceiveResponse\x12Y\n\x08\x43omplete\x12%.nitric.queue.v1.QueueCompleteRequest\x1a&.nitric.queue.v1.QueueCompleteResponseBb\n\x18io.nitric.proto.queue.v1B\x06QueuesP\x01Z\x0cnitric/v1;v1\xaa\x02\x15Nitric.Proto.Queue.v1\xca\x02\x15Nitric\\Proto\\Queue\\V1b\x06proto3'
    -  ,
    -  dependencies=[google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,])
    -
    -
    -
    -
    -_QUEUESENDREQUEST = _descriptor.Descriptor(
    -  name='QueueSendRequest',
    -  full_name='nitric.queue.v1.QueueSendRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='queue', full_name='nitric.queue.v1.QueueSendRequest.queue', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='task', full_name='nitric.queue.v1.QueueSendRequest.task', index=1,
    -      number=2, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=71,
    -  serialized_end=147,
    -)
    -
    -
    -_QUEUESENDRESPONSE = _descriptor.Descriptor(
    -  name='QueueSendResponse',
    -  full_name='nitric.queue.v1.QueueSendResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=149,
    -  serialized_end=168,
    -)
    -
    -
    -_QUEUESENDBATCHREQUEST = _descriptor.Descriptor(
    -  name='QueueSendBatchRequest',
    -  full_name='nitric.queue.v1.QueueSendBatchRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='queue', full_name='nitric.queue.v1.QueueSendBatchRequest.queue', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='tasks', full_name='nitric.queue.v1.QueueSendBatchRequest.tasks', index=1,
    -      number=2, type=11, cpp_type=10, label=3,
    -      has_default_value=False, default_value=[],
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=170,
    -  serialized_end=252,
    -)
    -
    -
    -_QUEUESENDBATCHRESPONSE = _descriptor.Descriptor(
    -  name='QueueSendBatchResponse',
    -  full_name='nitric.queue.v1.QueueSendBatchResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='failedTasks', full_name='nitric.queue.v1.QueueSendBatchResponse.failedTasks', index=0,
    -      number=1, type=11, cpp_type=10, label=3,
    -      has_default_value=False, default_value=[],
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=254,
    -  serialized_end=328,
    -)
    -
    -
    -_QUEUERECEIVEREQUEST = _descriptor.Descriptor(
    -  name='QueueReceiveRequest',
    -  full_name='nitric.queue.v1.QueueReceiveRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='queue', full_name='nitric.queue.v1.QueueReceiveRequest.queue', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='depth', full_name='nitric.queue.v1.QueueReceiveRequest.depth', index=1,
    -      number=2, type=5, cpp_type=1, label=1,
    -      has_default_value=False, default_value=0,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=330,
    -  serialized_end=381,
    -)
    -
    -
    -_QUEUERECEIVERESPONSE = _descriptor.Descriptor(
    -  name='QueueReceiveResponse',
    -  full_name='nitric.queue.v1.QueueReceiveResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='tasks', full_name='nitric.queue.v1.QueueReceiveResponse.tasks', index=0,
    -      number=1, type=11, cpp_type=10, label=3,
    -      has_default_value=False, default_value=[],
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=383,
    -  serialized_end=449,
    -)
    -
    -
    -_QUEUECOMPLETEREQUEST = _descriptor.Descriptor(
    -  name='QueueCompleteRequest',
    -  full_name='nitric.queue.v1.QueueCompleteRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='queue', full_name='nitric.queue.v1.QueueCompleteRequest.queue', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='lease_id', full_name='nitric.queue.v1.QueueCompleteRequest.lease_id', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=451,
    -  serialized_end=506,
    -)
    -
    -
    -_QUEUECOMPLETERESPONSE = _descriptor.Descriptor(
    -  name='QueueCompleteResponse',
    -  full_name='nitric.queue.v1.QueueCompleteResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=508,
    -  serialized_end=531,
    -)
    -
    -
    -_FAILEDTASK = _descriptor.Descriptor(
    -  name='FailedTask',
    -  full_name='nitric.queue.v1.FailedTask',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='task', full_name='nitric.queue.v1.FailedTask.task', index=0,
    -      number=1, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='message', full_name='nitric.queue.v1.FailedTask.message', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=533,
    -  serialized_end=605,
    -)
    -
    -
    -_NITRICTASK = _descriptor.Descriptor(
    -  name='NitricTask',
    -  full_name='nitric.queue.v1.NitricTask',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='id', full_name='nitric.queue.v1.NitricTask.id', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='lease_id', full_name='nitric.queue.v1.NitricTask.lease_id', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='payload_type', full_name='nitric.queue.v1.NitricTask.payload_type', index=2,
    -      number=3, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='payload', full_name='nitric.queue.v1.NitricTask.payload', index=3,
    -      number=4, type=11, cpp_type=10, label=1,
    -      has_default_value=False, default_value=None,
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=607,
    -  serialized_end=713,
    -)
    -
    -_QUEUESENDREQUEST.fields_by_name['task'].message_type = _NITRICTASK
    -_QUEUESENDBATCHREQUEST.fields_by_name['tasks'].message_type = _NITRICTASK
    -_QUEUESENDBATCHRESPONSE.fields_by_name['failedTasks'].message_type = _FAILEDTASK
    -_QUEUERECEIVERESPONSE.fields_by_name['tasks'].message_type = _NITRICTASK
    -_FAILEDTASK.fields_by_name['task'].message_type = _NITRICTASK
    -_NITRICTASK.fields_by_name['payload'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
    -DESCRIPTOR.message_types_by_name['QueueSendRequest'] = _QUEUESENDREQUEST
    -DESCRIPTOR.message_types_by_name['QueueSendResponse'] = _QUEUESENDRESPONSE
    -DESCRIPTOR.message_types_by_name['QueueSendBatchRequest'] = _QUEUESENDBATCHREQUEST
    -DESCRIPTOR.message_types_by_name['QueueSendBatchResponse'] = _QUEUESENDBATCHRESPONSE
    -DESCRIPTOR.message_types_by_name['QueueReceiveRequest'] = _QUEUERECEIVEREQUEST
    -DESCRIPTOR.message_types_by_name['QueueReceiveResponse'] = _QUEUERECEIVERESPONSE
    -DESCRIPTOR.message_types_by_name['QueueCompleteRequest'] = _QUEUECOMPLETEREQUEST
    -DESCRIPTOR.message_types_by_name['QueueCompleteResponse'] = _QUEUECOMPLETERESPONSE
    -DESCRIPTOR.message_types_by_name['FailedTask'] = _FAILEDTASK
    -DESCRIPTOR.message_types_by_name['NitricTask'] = _NITRICTASK
    -_sym_db.RegisterFileDescriptor(DESCRIPTOR)
    -
    -QueueSendRequest = _reflection.GeneratedProtocolMessageType('QueueSendRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _QUEUESENDREQUEST,
    -  '__module__' : 'queue.v1.queue_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.queue.v1.QueueSendRequest)
    -  })
    -_sym_db.RegisterMessage(QueueSendRequest)
    -
    -QueueSendResponse = _reflection.GeneratedProtocolMessageType('QueueSendResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _QUEUESENDRESPONSE,
    -  '__module__' : 'queue.v1.queue_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.queue.v1.QueueSendResponse)
    -  })
    -_sym_db.RegisterMessage(QueueSendResponse)
    -
    -QueueSendBatchRequest = _reflection.GeneratedProtocolMessageType('QueueSendBatchRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _QUEUESENDBATCHREQUEST,
    -  '__module__' : 'queue.v1.queue_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.queue.v1.QueueSendBatchRequest)
    -  })
    -_sym_db.RegisterMessage(QueueSendBatchRequest)
    -
    -QueueSendBatchResponse = _reflection.GeneratedProtocolMessageType('QueueSendBatchResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _QUEUESENDBATCHRESPONSE,
    -  '__module__' : 'queue.v1.queue_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.queue.v1.QueueSendBatchResponse)
    -  })
    -_sym_db.RegisterMessage(QueueSendBatchResponse)
    -
    -QueueReceiveRequest = _reflection.GeneratedProtocolMessageType('QueueReceiveRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _QUEUERECEIVEREQUEST,
    -  '__module__' : 'queue.v1.queue_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.queue.v1.QueueReceiveRequest)
    -  })
    -_sym_db.RegisterMessage(QueueReceiveRequest)
    -
    -QueueReceiveResponse = _reflection.GeneratedProtocolMessageType('QueueReceiveResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _QUEUERECEIVERESPONSE,
    -  '__module__' : 'queue.v1.queue_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.queue.v1.QueueReceiveResponse)
    -  })
    -_sym_db.RegisterMessage(QueueReceiveResponse)
    -
    -QueueCompleteRequest = _reflection.GeneratedProtocolMessageType('QueueCompleteRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _QUEUECOMPLETEREQUEST,
    -  '__module__' : 'queue.v1.queue_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.queue.v1.QueueCompleteRequest)
    -  })
    -_sym_db.RegisterMessage(QueueCompleteRequest)
    -
    -QueueCompleteResponse = _reflection.GeneratedProtocolMessageType('QueueCompleteResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _QUEUECOMPLETERESPONSE,
    -  '__module__' : 'queue.v1.queue_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.queue.v1.QueueCompleteResponse)
    -  })
    -_sym_db.RegisterMessage(QueueCompleteResponse)
    -
    -FailedTask = _reflection.GeneratedProtocolMessageType('FailedTask', (_message.Message,), {
    -  'DESCRIPTOR' : _FAILEDTASK,
    -  '__module__' : 'queue.v1.queue_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.queue.v1.FailedTask)
    -  })
    -_sym_db.RegisterMessage(FailedTask)
    -
    -NitricTask = _reflection.GeneratedProtocolMessageType('NitricTask', (_message.Message,), {
    -  'DESCRIPTOR' : _NITRICTASK,
    -  '__module__' : 'queue.v1.queue_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.queue.v1.NitricTask)
    -  })
    -_sym_db.RegisterMessage(NitricTask)
    -
    -
    -DESCRIPTOR._options = None
    -
    -_QUEUE = _descriptor.ServiceDescriptor(
    -  name='Queue',
    -  full_name='nitric.queue.v1.Queue',
    -  file=DESCRIPTOR,
    -  index=0,
    -  serialized_options=None,
    -  create_key=_descriptor._internal_create_key,
    -  serialized_start=716,
    -  serialized_end=1075,
    -  methods=[
    -  _descriptor.MethodDescriptor(
    -    name='Send',
    -    full_name='nitric.queue.v1.Queue.Send',
    -    index=0,
    -    containing_service=None,
    -    input_type=_QUEUESENDREQUEST,
    -    output_type=_QUEUESENDRESPONSE,
    -    serialized_options=None,
    -    create_key=_descriptor._internal_create_key,
    -  ),
    -  _descriptor.MethodDescriptor(
    -    name='SendBatch',
    -    full_name='nitric.queue.v1.Queue.SendBatch',
    -    index=1,
    -    containing_service=None,
    -    input_type=_QUEUESENDBATCHREQUEST,
    -    output_type=_QUEUESENDBATCHRESPONSE,
    -    serialized_options=None,
    -    create_key=_descriptor._internal_create_key,
    -  ),
    -  _descriptor.MethodDescriptor(
    -    name='Receive',
    -    full_name='nitric.queue.v1.Queue.Receive',
    -    index=2,
    -    containing_service=None,
    -    input_type=_QUEUERECEIVEREQUEST,
    -    output_type=_QUEUERECEIVERESPONSE,
    -    serialized_options=None,
    -    create_key=_descriptor._internal_create_key,
    -  ),
    -  _descriptor.MethodDescriptor(
    -    name='Complete',
    -    full_name='nitric.queue.v1.Queue.Complete',
    -    index=3,
    -    containing_service=None,
    -    input_type=_QUEUECOMPLETEREQUEST,
    -    output_type=_QUEUECOMPLETERESPONSE,
    -    serialized_options=None,
    -    create_key=_descriptor._internal_create_key,
    -  ),
    -])
    -_sym_db.RegisterServiceDescriptor(_QUEUE)
    -
    -DESCRIPTOR.services_by_name['Queue'] = _QUEUE
    -
    -# @@protoc_insertion_point(module_scope)
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
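For context on the classes documented below: these are plain generated protobuf containers, constructed directly from keyword arguments. The following is a minimal, illustrative sketch only, assuming the removed nitric.proto.queue.v1.queue_pb2 module were still importable; it is not part of the current SDK, which has moved to the betterproto stubs under nitricapi.

# Illustrative sketch only: assumes the removed nitric.proto.queue.v1.queue_pb2
# module were still importable. The current SDK uses the nitricapi betterproto stubs.
from google.protobuf.struct_pb2 import Struct

from nitric.proto.queue.v1 import queue_pb2

# Pack an arbitrary dict payload into the protobuf Struct well-known type.
payload = Struct()
payload.update({"message": "hello"})

# Build a task and wrap it in a send request for the "work" queue.
task = queue_pb2.NitricTask(id="task-1", payload_type="example", payload=payload)
request = queue_pb2.QueueSendRequest(queue="work", task=task)

# Received tasks carry a lease_id, which is echoed back to complete them.
complete = queue_pb2.QueueCompleteRequest(queue="work", lease_id="lease-123")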

    Classes

    -
    -
    -class FailedTask -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var message
    -
    -

    Field nitric.queue.v1.FailedTask.message

    -
    -
    var task
    -
    -

    Field nitric.queue.v1.FailedTask.task

    -
    -
    -
    -
    -class NitricTask -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var id
    -
    -

    Field nitric.queue.v1.NitricTask.id

    -
    -
    var lease_id
    -
    -

    Field nitric.queue.v1.NitricTask.lease_id

    -
    -
    var payload
    -
    -

    Field nitric.queue.v1.NitricTask.payload

    -
    -
    var payload_type
    -
    -

    Field nitric.queue.v1.NitricTask.payload_type

    -
    -
    -
    -
    -class QueueCompleteRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var lease_id
    -
    -

    Field nitric.queue.v1.QueueCompleteRequest.lease_id

    -
    -
    var queue
    -
    -

    Field nitric.queue.v1.QueueCompleteRequest.queue

    -
    -
    -
    -
    -class QueueCompleteResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -
    -
    -class QueueReceiveRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var depth
    -
    -

    Field nitric.queue.v1.QueueReceiveRequest.depth

    -
    -
    var queue
    -
    -

    Field nitric.queue.v1.QueueReceiveRequest.queue

    -
    -
    -
    -
    -class QueueReceiveResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var tasks
    -
    -

    Field nitric.queue.v1.QueueReceiveResponse.tasks

    -
    -
    -
    -
    -class QueueSendBatchRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var queue
    -
    -

    Field nitric.queue.v1.QueueSendBatchRequest.queue

    -
    -
    var tasks
    -
    -

    Field nitric.queue.v1.QueueSendBatchRequest.tasks

    -
    -
    -
    -
    -class QueueSendBatchResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var failedTasks
    -
    -

    Field nitric.queue.v1.QueueSendBatchResponse.failedTasks

    -
    -
    -
    -
    -class QueueSendRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var queue
    -
    -

    Field nitric.queue.v1.QueueSendRequest.queue

    -
    -
    var task
    -
    -

    Field nitric.queue.v1.QueueSendRequest.task

    -
    -
    -
    -
    -class QueueSendResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -
    -
    -
    -
    - -
-
\ No newline at end of file
diff --git a/docs/nitric/proto/storage/v1/storage_pb2.html b/docs/nitric/proto/storage/v1/storage_pb2.html
deleted file mode 100644
index cd81b26..0000000
--- a/docs/nitric/proto/storage/v1/storage_pb2.html
+++ /dev/null
@@ -1,618 +0,0 @@
-nitric.proto.storage.v1.storage_pb2 API documentation
    -
    -
    -

    Module nitric.proto.storage.v1.storage_pb2

    -
    -
    -

    Generated protocol buffer code.

    -
    - -Expand source code - -
    # -*- coding: utf-8 -*-
    -#
    -# Copyright (c) 2021 Nitric Technologies Pty Ltd.
    -#
    -# This file is part of Nitric Python 3 SDK.
    -# See https://github.com/nitrictech/python-sdk for further info.
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#     http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -#
    -
    -# Generated by the protocol buffer compiler.  DO NOT EDIT!
    -# source: storage/v1/storage.proto
    -"""Generated protocol buffer code."""
    -from google.protobuf import descriptor as _descriptor
    -from google.protobuf import message as _message
    -from google.protobuf import reflection as _reflection
    -from google.protobuf import symbol_database as _symbol_database
    -# @@protoc_insertion_point(imports)
    -
    -_sym_db = _symbol_database.Default()
    -
    -
    -
    -
    -DESCRIPTOR = _descriptor.FileDescriptor(
    -  name='storage/v1/storage.proto',
    -  package='nitric.storage.v1',
    -  syntax='proto3',
    -  serialized_options=b'\n\032io.nitric.proto.storage.v1B\010StoragesP\001Z\014nitric/v1;v1\252\002\027Nitric.Proto.Storage.v1\312\002\027Nitric\\Proto\\Storage\\V1',
    -  create_key=_descriptor._internal_create_key,
    -  serialized_pb=b'\n\x18storage/v1/storage.proto\x12\x11nitric.storage.v1\"E\n\x13StorageWriteRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\x12\x0c\n\x04\x62ody\x18\x03 \x01(\x0c\"\x16\n\x14StorageWriteResponse\"6\n\x12StorageReadRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\"#\n\x13StorageReadResponse\x12\x0c\n\x04\x62ody\x18\x01 \x01(\x0c\"8\n\x14StorageDeleteRequest\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x0b\n\x03key\x18\x02 \x01(\t\"\x17\n\x15StorageDeleteResponse2\x97\x02\n\x07Storage\x12U\n\x04Read\x12%.nitric.storage.v1.StorageReadRequest\x1a&.nitric.storage.v1.StorageReadResponse\x12X\n\x05Write\x12&.nitric.storage.v1.StorageWriteRequest\x1a\'.nitric.storage.v1.StorageWriteResponse\x12[\n\x06\x44\x65lete\x12\'.nitric.storage.v1.StorageDeleteRequest\x1a(.nitric.storage.v1.StorageDeleteResponseBj\n\x1aio.nitric.proto.storage.v1B\x08StoragesP\x01Z\x0cnitric/v1;v1\xaa\x02\x17Nitric.Proto.Storage.v1\xca\x02\x17Nitric\\Proto\\Storage\\V1b\x06proto3'
    -)
    -
    -
    -
    -
    -_STORAGEWRITEREQUEST = _descriptor.Descriptor(
    -  name='StorageWriteRequest',
    -  full_name='nitric.storage.v1.StorageWriteRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='bucket_name', full_name='nitric.storage.v1.StorageWriteRequest.bucket_name', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='key', full_name='nitric.storage.v1.StorageWriteRequest.key', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='body', full_name='nitric.storage.v1.StorageWriteRequest.body', index=2,
    -      number=3, type=12, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"",
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=47,
    -  serialized_end=116,
    -)
    -
    -
    -_STORAGEWRITERESPONSE = _descriptor.Descriptor(
    -  name='StorageWriteResponse',
    -  full_name='nitric.storage.v1.StorageWriteResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=118,
    -  serialized_end=140,
    -)
    -
    -
    -_STORAGEREADREQUEST = _descriptor.Descriptor(
    -  name='StorageReadRequest',
    -  full_name='nitric.storage.v1.StorageReadRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='bucket_name', full_name='nitric.storage.v1.StorageReadRequest.bucket_name', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='key', full_name='nitric.storage.v1.StorageReadRequest.key', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=142,
    -  serialized_end=196,
    -)
    -
    -
    -_STORAGEREADRESPONSE = _descriptor.Descriptor(
    -  name='StorageReadResponse',
    -  full_name='nitric.storage.v1.StorageReadResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='body', full_name='nitric.storage.v1.StorageReadResponse.body', index=0,
    -      number=1, type=12, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"",
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=198,
    -  serialized_end=233,
    -)
    -
    -
    -_STORAGEDELETEREQUEST = _descriptor.Descriptor(
    -  name='StorageDeleteRequest',
    -  full_name='nitric.storage.v1.StorageDeleteRequest',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -    _descriptor.FieldDescriptor(
    -      name='bucket_name', full_name='nitric.storage.v1.StorageDeleteRequest.bucket_name', index=0,
    -      number=1, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -    _descriptor.FieldDescriptor(
    -      name='key', full_name='nitric.storage.v1.StorageDeleteRequest.key', index=1,
    -      number=2, type=9, cpp_type=9, label=1,
    -      has_default_value=False, default_value=b"".decode('utf-8'),
    -      message_type=None, enum_type=None, containing_type=None,
    -      is_extension=False, extension_scope=None,
    -      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=235,
    -  serialized_end=291,
    -)
    -
    -
    -_STORAGEDELETERESPONSE = _descriptor.Descriptor(
    -  name='StorageDeleteResponse',
    -  full_name='nitric.storage.v1.StorageDeleteResponse',
    -  filename=None,
    -  file=DESCRIPTOR,
    -  containing_type=None,
    -  create_key=_descriptor._internal_create_key,
    -  fields=[
    -  ],
    -  extensions=[
    -  ],
    -  nested_types=[],
    -  enum_types=[
    -  ],
    -  serialized_options=None,
    -  is_extendable=False,
    -  syntax='proto3',
    -  extension_ranges=[],
    -  oneofs=[
    -  ],
    -  serialized_start=293,
    -  serialized_end=316,
    -)
    -
    -DESCRIPTOR.message_types_by_name['StorageWriteRequest'] = _STORAGEWRITEREQUEST
    -DESCRIPTOR.message_types_by_name['StorageWriteResponse'] = _STORAGEWRITERESPONSE
    -DESCRIPTOR.message_types_by_name['StorageReadRequest'] = _STORAGEREADREQUEST
    -DESCRIPTOR.message_types_by_name['StorageReadResponse'] = _STORAGEREADRESPONSE
    -DESCRIPTOR.message_types_by_name['StorageDeleteRequest'] = _STORAGEDELETEREQUEST
    -DESCRIPTOR.message_types_by_name['StorageDeleteResponse'] = _STORAGEDELETERESPONSE
    -_sym_db.RegisterFileDescriptor(DESCRIPTOR)
    -
    -StorageWriteRequest = _reflection.GeneratedProtocolMessageType('StorageWriteRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _STORAGEWRITEREQUEST,
    -  '__module__' : 'storage.v1.storage_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.storage.v1.StorageWriteRequest)
    -  })
    -_sym_db.RegisterMessage(StorageWriteRequest)
    -
    -StorageWriteResponse = _reflection.GeneratedProtocolMessageType('StorageWriteResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _STORAGEWRITERESPONSE,
    -  '__module__' : 'storage.v1.storage_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.storage.v1.StorageWriteResponse)
    -  })
    -_sym_db.RegisterMessage(StorageWriteResponse)
    -
    -StorageReadRequest = _reflection.GeneratedProtocolMessageType('StorageReadRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _STORAGEREADREQUEST,
    -  '__module__' : 'storage.v1.storage_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.storage.v1.StorageReadRequest)
    -  })
    -_sym_db.RegisterMessage(StorageReadRequest)
    -
    -StorageReadResponse = _reflection.GeneratedProtocolMessageType('StorageReadResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _STORAGEREADRESPONSE,
    -  '__module__' : 'storage.v1.storage_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.storage.v1.StorageReadResponse)
    -  })
    -_sym_db.RegisterMessage(StorageReadResponse)
    -
    -StorageDeleteRequest = _reflection.GeneratedProtocolMessageType('StorageDeleteRequest', (_message.Message,), {
    -  'DESCRIPTOR' : _STORAGEDELETEREQUEST,
    -  '__module__' : 'storage.v1.storage_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.storage.v1.StorageDeleteRequest)
    -  })
    -_sym_db.RegisterMessage(StorageDeleteRequest)
    -
    -StorageDeleteResponse = _reflection.GeneratedProtocolMessageType('StorageDeleteResponse', (_message.Message,), {
    -  'DESCRIPTOR' : _STORAGEDELETERESPONSE,
    -  '__module__' : 'storage.v1.storage_pb2'
    -  # @@protoc_insertion_point(class_scope:nitric.storage.v1.StorageDeleteResponse)
    -  })
    -_sym_db.RegisterMessage(StorageDeleteResponse)
    -
    -
    -DESCRIPTOR._options = None
    -
    -_STORAGE = _descriptor.ServiceDescriptor(
    -  name='Storage',
    -  full_name='nitric.storage.v1.Storage',
    -  file=DESCRIPTOR,
    -  index=0,
    -  serialized_options=None,
    -  create_key=_descriptor._internal_create_key,
    -  serialized_start=319,
    -  serialized_end=598,
    -  methods=[
    -  _descriptor.MethodDescriptor(
    -    name='Read',
    -    full_name='nitric.storage.v1.Storage.Read',
    -    index=0,
    -    containing_service=None,
    -    input_type=_STORAGEREADREQUEST,
    -    output_type=_STORAGEREADRESPONSE,
    -    serialized_options=None,
    -    create_key=_descriptor._internal_create_key,
    -  ),
    -  _descriptor.MethodDescriptor(
    -    name='Write',
    -    full_name='nitric.storage.v1.Storage.Write',
    -    index=1,
    -    containing_service=None,
    -    input_type=_STORAGEWRITEREQUEST,
    -    output_type=_STORAGEWRITERESPONSE,
    -    serialized_options=None,
    -    create_key=_descriptor._internal_create_key,
    -  ),
    -  _descriptor.MethodDescriptor(
    -    name='Delete',
    -    full_name='nitric.storage.v1.Storage.Delete',
    -    index=2,
    -    containing_service=None,
    -    input_type=_STORAGEDELETEREQUEST,
    -    output_type=_STORAGEDELETERESPONSE,
    -    serialized_options=None,
    -    create_key=_descriptor._internal_create_key,
    -  ),
    -])
    -_sym_db.RegisterServiceDescriptor(_STORAGE)
    -
    -DESCRIPTOR.services_by_name['Storage'] = _STORAGE
    -
    -# @@protoc_insertion_point(module_scope)
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
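As with the queue module, the storage messages documented below are simple field containers. A minimal sketch, assuming the removed nitric.proto.storage.v1.storage_pb2 module were still importable (the current SDK uses the nitricapi betterproto stubs instead):

# Illustrative sketch only: assumes the removed nitric.proto.storage.v1.storage_pb2
# module were still importable.
from nitric.proto.storage.v1 import storage_pb2

# Write a small byte payload to a bucket under a key.
write = storage_pb2.StorageWriteRequest(bucket_name="images", key="cat.png", body=b"...")

# Read and delete use the same bucket_name/key pair.
read = storage_pb2.StorageReadRequest(bucket_name="images", key="cat.png")
delete = storage_pb2.StorageDeleteRequest(bucket_name="images", key="cat.png")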

    Classes

    -
    -
    -class StorageDeleteRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var bucket_name
    -
    -

    Field nitric.storage.v1.StorageDeleteRequest.bucket_name

    -
    -
    var key
    -
    -

    Field nitric.storage.v1.StorageDeleteRequest.key

    -
    -
    -
    -
    -class StorageDeleteResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -
    -
    -class StorageReadRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var bucket_name
    -
    -

    Field nitric.storage.v1.StorageReadRequest.bucket_name

    -
    -
    var key
    -
    -

    Field nitric.storage.v1.StorageReadRequest.key

    -
    -
    -
    -
    -class StorageReadResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var body
    -
    -

    Field nitric.storage.v1.StorageReadResponse.body

    -
    -
    -
    -
    -class StorageWriteRequest -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -

    Instance variables

    -
    -
    var body
    -
    -

    Field nitric.storage.v1.StorageWriteRequest.body

    -
    -
    var bucket_name
    -
    -

    Field nitric.storage.v1.StorageWriteRequest.bucket_name

    -
    -
    var key
    -
    -

    Field nitric.storage.v1.StorageWriteRequest.key

    -
    -
    -
    -
    -class StorageWriteResponse -(*args, **kwargs) -
    -
    -

    A ProtocolMessage

    -

    Ancestors

    -
      -
    • google.protobuf.pyext._message.CMessage
    • -
    • google.protobuf.message.Message
    • -
    -

    Class variables

    -
    -
    var DESCRIPTOR
    -
    -
    -
    -
    -
    -
    -
    -
    - -
-
\ No newline at end of file