diff --git a/flyteidl/protos/docs/admin/admin.rst b/flyteidl/protos/docs/admin/admin.rst
new file mode 100644
index 00000000000..dd730f42332
--- /dev/null
+++ b/flyteidl/protos/docs/admin/admin.rst
@@ -0,0 +1,3764 @@
+######################
+Protocol Documentation
+######################
+
+
+
+
+.. _ref_flyteidl/admin/cluster_assignment.proto:
+
+flyteidl/admin/cluster_assignment.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.Affinity:
+
+Affinity
+------------------------------------------------------------------
+
+Defines a set of constraints used to select eligible objects based on labels they possess.
+
+
+
+.. csv-table:: Affinity type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "selectors", ":ref:`ref_flyteidl.admin.Selector`", "repeated", "Multiple selectors are 'and'-ed together to produce the list of matching, eligible objects."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ClusterAssignment:
+
+ClusterAssignment
+------------------------------------------------------------------
+
+Encapsulates specifications for routing an execution onto a specific cluster.
+
+
+
+.. csv-table:: ClusterAssignment type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "affinity", ":ref:`ref_flyteidl.admin.Affinity`", "", ""
+ "toleration", ":ref:`ref_flyteidl.admin.Toleration`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Selector:
+
+Selector
+------------------------------------------------------------------
+
+A Selector is a specification for identifying a set of objects with corresponding labels.
+
+
+
+.. csv-table:: Selector type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", "The label key."
+ "value", ":ref:`ref_string`", "repeated", "One or more values used to match labels. For equality (or inequality) requirements, values must contain a single element. For set-based requirements, values may contain one or more elements."
+ "operator", ":ref:`ref_flyteidl.admin.Selector.Operator`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Toleration:
+
+Toleration
+------------------------------------------------------------------
+
+Defines a set of specific label selectors that the execution can tolerate on a cluster.
+
+
+
+.. csv-table:: Toleration type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "selectors", ":ref:`ref_flyteidl.admin.Selector`", "repeated", "A toleration selector is similar to that of an affinity but the only valid operators are EQUALS and EXISTS."
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Selector.Operator:
+
+Selector.Operator
+------------------------------------------------------------------
+
+Defines how a label with a corresponding key and value is selected or excluded.
+
+.. csv-table:: Enum Selector.Operator values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "EQUALS", "0", ""
+ "NOT_EQUALS", "1", ""
+ "IN", "2", ""
+ "NOT_IN", "3", ""
+ "EXISTS", "4", "A label key with any value"
+
+
+
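+For illustration, a minimal sketch of building a ``ClusterAssignment`` from the messages above,
+assuming Python bindings generated from this proto file are importable as
+``flyteidl.admin.cluster_assignment_pb2`` (the label keys and values are hypothetical):
+
+.. code-block:: python
+
+    # Sketch only: assumes protoc-generated Python bindings for this proto file.
+    from flyteidl.admin import cluster_assignment_pb2 as ca
+
+    assignment = ca.ClusterAssignment(
+        affinity=ca.Affinity(
+            selectors=[
+                # Equality requirement: 'value' must contain a single element.
+                ca.Selector(key="tier", value=["gpu"], operator=ca.Selector.EQUALS),
+                # Set-based requirement: 'value' may contain several elements.
+                ca.Selector(key="region", value=["us-east-1", "us-west-2"],
+                            operator=ca.Selector.IN),
+            ]
+        ),
+        toleration=ca.Toleration(
+            # Only EQUALS and EXISTS are valid operators for tolerations.
+            selectors=[ca.Selector(key="preemptible", operator=ca.Selector.EXISTS)]
+        ),
+    )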
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/common.proto:
+
+flyteidl/admin/common.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.Annotations:
+
+Annotations
+------------------------------------------------------------------
+
+Annotation values to be applied to an execution resource.
+In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined
+to specify how to merge annotations defined at registration and execution time.
+
+
+
+.. csv-table:: Annotations type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "values", ":ref:`ref_flyteidl.admin.Annotations.ValuesEntry`", "repeated", "Map of custom annotations to be applied to the execution resource."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Annotations.ValuesEntry:
+
+Annotations.ValuesEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: Annotations.ValuesEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.AuthRole:
+
+AuthRole
+------------------------------------------------------------------
+
+Defines permissions associated with executions created by this launch plan spec.
+Use either of these roles when it has the permissions required by your workflow execution.
+Deprecated.
+
+
+
+.. csv-table:: AuthRole type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "assumable_iam_role", ":ref:`ref_string`", "", "Defines an optional iam role which will be used for tasks run in executions created with this launch plan."
+ "kubernetes_service_account", ":ref:`ref_string`", "", "Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.EmailNotification:
+
+EmailNotification
+------------------------------------------------------------------
+
+Defines an email notification specification.
+
+
+
+.. csv-table:: EmailNotification type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "recipients_email", ":ref:`ref_string`", "repeated", "The list of email address recipients for this notification. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Labels:
+
+Labels
+------------------------------------------------------------------
+
+Label values to be applied to an execution resource.
+In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined
+to specify how to merge labels defined at registration and execution time.
+
+
+
+.. csv-table:: Labels type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "values", ":ref:`ref_flyteidl.admin.Labels.ValuesEntry`", "repeated", "Map of custom labels to be applied to the execution resource."
+
+
+
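+As an illustration, both ``Labels`` and ``Annotations`` carry a simple string map; a minimal
+sketch using the generated Python bindings (module path assumed, key/value pairs hypothetical):
+
+.. code-block:: python
+
+    # Sketch only: Labels.values and Annotations.values are map<string, string> fields.
+    from flyteidl.admin import common_pb2
+
+    labels = common_pb2.Labels(values={"team": "recommendations", "env": "staging"})
+    annotations = common_pb2.Annotations(values={"owner": "data-platform"})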
+
+
+
+
+.. _ref_flyteidl.admin.Labels.ValuesEntry:
+
+Labels.ValuesEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: Labels.ValuesEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NamedEntity:
+
+NamedEntity
+------------------------------------------------------------------
+
+Encapsulates information common to a NamedEntity, a Flyte resource such as a task,
+workflow or launch plan. A NamedEntity is exclusively identified by its resource type
+and identifier.
+
+
+
+.. csv-table:: NamedEntity type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the named entity. One of Task, Workflow or LaunchPlan."
+ "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", ""
+ "metadata", ":ref:`ref_flyteidl.admin.NamedEntityMetadata`", "", "Additional metadata around a named entity."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NamedEntityGetRequest:
+
+NamedEntityGetRequest
+------------------------------------------------------------------
+
+A request to retrieve the metadata associated with a NamedEntityIdentifier
+
+
+
+.. csv-table:: NamedEntityGetRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. +required"
+ "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "The identifier for the named entity for which to fetch metadata. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NamedEntityIdentifier:
+
+NamedEntityIdentifier
+------------------------------------------------------------------
+
+Encapsulation of fields that identify a Flyte resource.
+A Flyte resource can be a task, workflow or launch plan.
+A resource can internally have multiple versions and is uniquely identified
+by project, domain, and name.
+
+
+
+.. csv-table:: NamedEntityIdentifier type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", "Name of the project the resource belongs to."
+ "domain", ":ref:`ref_string`", "", "Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project."
+ "name", ":ref:`ref_string`", "", "User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NamedEntityIdentifierList:
+
+NamedEntityIdentifierList
+------------------------------------------------------------------
+
+Represents a list of NamedEntityIdentifiers.
+
+
+
+.. csv-table:: NamedEntityIdentifierList type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "entities", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "repeated", "A list of identifiers."
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NamedEntityIdentifierListRequest:
+
+NamedEntityIdentifierListRequest
+------------------------------------------------------------------
+
+Represents a request structure to list NamedEntityIdentifiers.
+
+
+
+.. csv-table:: NamedEntityIdentifierListRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", "Name of the project that contains the identifiers. +required"
+ "domain", ":ref:`ref_string`", "", "Name of the domain the identifiers belong to within the project. +required"
+ "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required"
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional"
+ "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Specifies how listed entities should be sorted in the response. +optional"
+ "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. +optional"
+
+
+
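+A hedged sketch of a paginated listing request built from the fields above (the Python module
+path and the project/domain values are assumptions; the service call itself is not shown):
+
+.. code-block:: python
+
+    # Sketch only: list identifiers page by page using limit, token and sort_by.
+    from flyteidl.admin import common_pb2
+
+    request = common_pb2.NamedEntityIdentifierListRequest(
+        project="flytesnacks",
+        domain="development",
+        limit=50,
+        sort_by=common_pb2.Sort(key="name", direction=common_pb2.Sort.ASCENDING),
+    )
+    # To fetch the next page, echo the token returned in the previous response:
+    # request.token = previous_response.token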
+
+
+
+
+.. _ref_flyteidl.admin.NamedEntityList:
+
+NamedEntityList
+------------------------------------------------------------------
+
+Represents a list of NamedEntityIdentifiers.
+
+
+
+.. csv-table:: NamedEntityList type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "entities", ":ref:`ref_flyteidl.admin.NamedEntity`", "repeated", "A list of NamedEntity objects"
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NamedEntityListRequest:
+
+NamedEntityListRequest
+------------------------------------------------------------------
+
+Represents a request structure to list NamedEntity objects
+
+
+
+.. csv-table:: NamedEntityListRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. +required"
+ "project", ":ref:`ref_string`", "", "Name of the project that contains the identifiers. +required"
+ "domain", ":ref:`ref_string`", "", "Name of the domain the identifiers belong to within the project."
+ "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned."
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional"
+ "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Specifies how listed entities should be sorted in the response. +optional"
+ "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. +optional"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NamedEntityMetadata:
+
+NamedEntityMetadata
+------------------------------------------------------------------
+
+Additional metadata around a named entity.
+
+
+
+.. csv-table:: NamedEntityMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "description", ":ref:`ref_string`", "", "Common description across all versions of the entity +optional"
+ "state", ":ref:`ref_flyteidl.admin.NamedEntityState`", "", "Shared state across all versions of the entity. At this point in time, only workflow entities can have their state archived."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NamedEntityUpdateRequest:
+
+NamedEntityUpdateRequest
+------------------------------------------------------------------
+
+Request to set the referenced named entity state to the configured value.
+
+
+
+.. csv-table:: NamedEntityUpdateRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the metadata to update +required"
+ "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "Identifier of the metadata to update +required"
+ "metadata", ":ref:`ref_flyteidl.admin.NamedEntityMetadata`", "", "Metadata object to set as the new value +required"
+
+
+
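+For example, archiving a workflow's named entity could look roughly like the sketch below
+(import paths for the core identifier types are assumptions):
+
+.. code-block:: python
+
+    # Sketch only: set the shared NamedEntityMetadata state to ARCHIVED.
+    from flyteidl.admin import common_pb2
+    from flyteidl.core import identifier_pb2
+
+    request = common_pb2.NamedEntityUpdateRequest(
+        resource_type=identifier_pb2.WORKFLOW,
+        id=common_pb2.NamedEntityIdentifier(
+            project="flytesnacks", domain="development", name="my_workflow"
+        ),
+        metadata=common_pb2.NamedEntityMetadata(
+            description="No longer maintained",
+            state=common_pb2.NAMED_ENTITY_ARCHIVED,
+        ),
+    )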
+
+
+
+
+.. _ref_flyteidl.admin.NamedEntityUpdateResponse:
+
+NamedEntityUpdateResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be populated in the future.
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Notification:
+
+Notification
+------------------------------------------------------------------
+
+Represents a structure for notifications based on execution status.
+The notification content is configured within flyte admin but can be templatized.
+Future iterations could expose configuring notifications with custom content.
+
+
+
+.. csv-table:: Notification type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "phases", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "repeated", "A list of phases to which users can associate the notifications. +required"
+ "email", ":ref:`ref_flyteidl.admin.EmailNotification`", "", ""
+ "pager_duty", ":ref:`ref_flyteidl.admin.PagerDutyNotification`", "", ""
+ "slack", ":ref:`ref_flyteidl.admin.SlackNotification`", "", ""
+
+
+
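+As a rough sketch, a notification pairs one or more terminal phases with exactly one delivery
+mechanism (email, PagerDuty or Slack); the import paths and recipient address below are assumptions:
+
+.. code-block:: python
+
+    # Sketch only: email on success or failure.
+    from flyteidl.admin import common_pb2
+    from flyteidl.core import execution_pb2
+
+    notification = common_pb2.Notification(
+        phases=[
+            execution_pb2.WorkflowExecution.SUCCEEDED,
+            execution_pb2.WorkflowExecution.FAILED,
+        ],
+        email=common_pb2.EmailNotification(recipients_email=["oncall@example.com"]),
+    )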
+
+
+
+
+.. _ref_flyteidl.admin.ObjectGetRequest:
+
+ObjectGetRequest
+------------------------------------------------------------------
+
+Shared request structure to fetch a single resource.
+Resources include: Task, Workflow, LaunchPlan
+
+
+
+.. csv-table:: ObjectGetRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Indicates a unique version of resource. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.PagerDutyNotification:
+
+PagerDutyNotification
+------------------------------------------------------------------
+
+Defines a PagerDuty notification specification.
+
+
+
+.. csv-table:: PagerDutyNotification type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "recipients_email", ":ref:`ref_string`", "repeated", "Currently, PagerDuty notifications leverage email to trigger a notification. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.RawOutputDataConfig:
+
+RawOutputDataConfig
+------------------------------------------------------------------
+
+Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.).
+See https://github.com/flyteorg/flyte/issues/211 for more background information.
+
+
+
+.. csv-table:: RawOutputDataConfig type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "output_location_prefix", ":ref:`ref_string`", "", "Prefix for where offloaded data from user workflows will be written e.g. s3://bucket/key or s3://bucket/"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ResourceListRequest:
+
+ResourceListRequest
+------------------------------------------------------------------
+
+Shared request structure to retrieve a list of resources.
+Resources include: Task, Workflow, LaunchPlan
+
+
+
+.. csv-table:: ResourceListRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "id represents the unique identifier of the resource. +required"
+ "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required"
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional"
+ "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. +optional"
+ "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.SlackNotification:
+
+SlackNotification
+------------------------------------------------------------------
+
+Defines a Slack notification specification.
+
+
+
+.. csv-table:: SlackNotification type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "recipients_email", ":ref:`ref_string`", "repeated", "Currently, Slack notifications leverage email to trigger a notification. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Sort:
+
+Sort
+------------------------------------------------------------------
+
+Specifies sort ordering in a list request.
+
+
+
+.. csv-table:: Sort type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", "Indicates an attribute to sort the response values. +required"
+ "direction", ":ref:`ref_flyteidl.admin.Sort.Direction`", "", "Indicates the direction to apply sort key for response values. +optional"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.UrlBlob:
+
+UrlBlob
+------------------------------------------------------------------
+
+Represents a string url and associated metadata used throughout the platform.
+
+
+
+.. csv-table:: UrlBlob type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "url", ":ref:`ref_string`", "", "Actual url value."
+ "bytes", ":ref:`ref_int64`", "", "Represents the size of the file accessible at the above url."
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NamedEntityState:
+
+NamedEntityState
+------------------------------------------------------------------
+
+The status of the named entity is used to control its visibility in the UI.
+
+.. csv-table:: Enum NamedEntityState values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "NAMED_ENTITY_ACTIVE", "0", "By default, all named entities are considered active and under development."
+ "NAMED_ENTITY_ARCHIVED", "1", "Archived named entities are no longer visible in the UI."
+ "SYSTEM_GENERATED", "2", "System generated entities that aren't explicitly created or managed by a user."
+
+
+
+.. _ref_flyteidl.admin.Sort.Direction:
+
+Sort.Direction
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum Sort.Direction values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "DESCENDING", "0", "By default, fields are sorted in descending order."
+ "ASCENDING", "1", ""
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/event.proto:
+
+flyteidl/admin/event.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.EventErrorAlreadyInTerminalState:
+
+EventErrorAlreadyInTerminalState
+------------------------------------------------------------------
+
+Indicates that a sent event was not used to update execution state due to
+the referenced execution already being terminated (and therefore ineligible
+for further state transitions).
+
+
+
+.. csv-table:: EventErrorAlreadyInTerminalState type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "current_phase", ":ref:`ref_string`", "", "+required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.EventErrorIncompatibleCluster:
+
+EventErrorIncompatibleCluster
+------------------------------------------------------------------
+
+Indicates an event was rejected because it came from a different cluster than
+is on record as running the execution.
+
+
+
+.. csv-table:: EventErrorIncompatibleCluster type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "cluster", ":ref:`ref_string`", "", "The cluster which has been recorded as processing the execution. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.EventFailureReason:
+
+EventFailureReason
+------------------------------------------------------------------
+
+Indicates why a sent event was not used to update execution.
+
+
+
+.. csv-table:: EventFailureReason type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "already_in_terminal_state", ":ref:`ref_flyteidl.admin.EventErrorAlreadyInTerminalState`", "", ""
+ "incompatible_cluster", ":ref:`ref_flyteidl.admin.EventErrorIncompatibleCluster`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NodeExecutionEventRequest:
+
+NodeExecutionEventRequest
+------------------------------------------------------------------
+
+Request to send a notification that a node execution event has occurred.
+
+
+
+.. csv-table:: NodeExecutionEventRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "request_id", ":ref:`ref_string`", "", "Unique ID for this request that can be traced between services"
+ "event", ":ref:`ref_flyteidl.event.NodeExecutionEvent`", "", "Details about the event that occurred."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NodeExecutionEventResponse:
+
+NodeExecutionEventResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be populated in the future.
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskExecutionEventRequest:
+
+TaskExecutionEventRequest
+------------------------------------------------------------------
+
+Request to send a notification that a task execution event has occurred.
+
+
+
+.. csv-table:: TaskExecutionEventRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "request_id", ":ref:`ref_string`", "", "Unique ID for this request that can be traced between services"
+ "event", ":ref:`ref_flyteidl.event.TaskExecutionEvent`", "", "Details about the event that occurred."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskExecutionEventResponse:
+
+TaskExecutionEventResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be populated in the future.
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowExecutionEventRequest:
+
+WorkflowExecutionEventRequest
+------------------------------------------------------------------
+
+Request to send a notification that a workflow execution event has occurred.
+
+
+
+.. csv-table:: WorkflowExecutionEventRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "request_id", ":ref:`ref_string`", "", "Unique ID for this request that can be traced between services"
+ "event", ":ref:`ref_flyteidl.event.WorkflowExecutionEvent`", "", "Details about the event that occurred."
+
+
+
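+A minimal sketch of wrapping an event for this request type (the event payload comes from
+``flyteidl.event`` and is normally produced by the execution engine; the request id is a dummy value):
+
+.. code-block:: python
+
+    # Sketch only: the event body is left empty here for brevity.
+    from flyteidl.admin import event_pb2 as admin_event_pb2
+    from flyteidl.event import event_pb2
+
+    request = admin_event_pb2.WorkflowExecutionEventRequest(
+        request_id="4b7c6a1e-example",
+        event=event_pb2.WorkflowExecutionEvent(),
+    )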
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowExecutionEventResponse:
+
+WorkflowExecutionEventResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be populated in the future.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/execution.proto:
+
+flyteidl/admin/execution.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.AbortMetadata:
+
+AbortMetadata
+------------------------------------------------------------------
+
+Specifies metadata around an aborted workflow execution.
+
+
+
+.. csv-table:: AbortMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "cause", ":ref:`ref_string`", "", "In the case of a user-specified abort, this will pass along the user-supplied cause."
+ "principal", ":ref:`ref_string`", "", "Identifies the entity (if any) responsible for terminating the execution"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Execution:
+
+Execution
+------------------------------------------------------------------
+
+A workflow execution represents an instantiated workflow, including all inputs and additional
+metadata as well as computed results including state, outputs, and duration-based attributes.
+Used as a response object in Get and List execution requests.
+
+
+
+.. csv-table:: Execution type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Unique identifier of the workflow execution."
+ "spec", ":ref:`ref_flyteidl.admin.ExecutionSpec`", "", "User-provided configuration and inputs for launching the execution."
+ "closure", ":ref:`ref_flyteidl.admin.ExecutionClosure`", "", "Execution results."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionClosure:
+
+ExecutionClosure
+------------------------------------------------------------------
+
+Encapsulates the results of the Execution
+
+
+
+.. csv-table:: ExecutionClosure type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "outputs", ":ref:`ref_flyteidl.admin.LiteralMapBlob`", "", "**Deprecated.** Output URI in the case of a successful execution. DEPRECATED. Use GetExecutionData to fetch output data instead."
+ "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information in the case of a failed execution."
+ "abort_cause", ":ref:`ref_string`", "", "**Deprecated.** In the case of a user-specified abort, this will pass along the user-supplied cause."
+ "abort_metadata", ":ref:`ref_flyteidl.admin.AbortMetadata`", "", "In the case of a user-specified abort, this will pass along the user and their supplied cause."
+ "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Raw output data produced by this execution. DEPRECATED. Use GetExecutionData to fetch output data instead."
+ "computed_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Inputs computed and passed for execution. computed_inputs depends on inputs in ExecutionSpec, fixed and default inputs in launch plan"
+ "phase", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "", "Most recent recorded phase for the execution."
+ "started_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Reported time at which the execution began running."
+ "duration", ":ref:`ref_google.protobuf.Duration`", "", "The amount of time the execution spent running."
+ "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Reported time at which the execution was created."
+ "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Reported time at which the execution was last updated."
+ "notifications", ":ref:`ref_flyteidl.admin.Notification`", "repeated", "The notification settings to use after merging the CreateExecutionRequest and the launch plan notification settings. An execution launched with notifications will always prefer that definition to notifications defined statically in a launch plan."
+ "workflow_id", ":ref:`ref_flyteidl.core.Identifier`", "", "Identifies the workflow definition for this execution."
+ "state_change_details", ":ref:`ref_flyteidl.admin.ExecutionStateChangeDetails`", "", "Provides the details of the last state change"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionCreateRequest:
+
+ExecutionCreateRequest
+------------------------------------------------------------------
+
+Request to launch an execution with the given project, domain and optionally-assigned name.
+
+
+
+.. csv-table:: ExecutionCreateRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", "Name of the project the execution belongs to. +required"
+ "domain", ":ref:`ref_string`", "", "Name of the domain the execution belongs to. A domain can be considered as a subset within a specific project. +required"
+ "name", ":ref:`ref_string`", "", "User provided value for the resource. If none is provided the system will generate a unique string. +optional"
+ "spec", ":ref:`ref_flyteidl.admin.ExecutionSpec`", "", "Additional fields necessary to launch the execution. +optional"
+ "inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "The inputs required to start the execution. All required inputs must be included in this map. If not required and not provided, defaults apply. +optional"
+
+
+
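+For illustration, a hedged sketch of launching an execution against a registered launch plan
+(import paths, identifier values and the empty input map are assumptions for brevity):
+
+.. code-block:: python
+
+    # Sketch only: name is omitted so the system generates a unique one.
+    from flyteidl.admin import execution_pb2
+    from flyteidl.core import identifier_pb2, literals_pb2
+
+    request = execution_pb2.ExecutionCreateRequest(
+        project="flytesnacks",
+        domain="development",
+        spec=execution_pb2.ExecutionSpec(
+            launch_plan=identifier_pb2.Identifier(
+                resource_type=identifier_pb2.LAUNCH_PLAN,
+                project="flytesnacks",
+                domain="development",
+                name="my_launch_plan",
+                version="v1",
+            ),
+        ),
+        inputs=literals_pb2.LiteralMap(),  # all required inputs go here
+    )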
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionCreateResponse:
+
+ExecutionCreateResponse
+------------------------------------------------------------------
+
+The unique identifier for a successfully created execution.
+If the name was *not* specified in the create request, this identifier will include a generated name.
+
+
+
+.. csv-table:: ExecutionCreateResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionList:
+
+ExecutionList
+------------------------------------------------------------------
+
+Used as a response for request to list executions.
+See :ref:`ref_flyteidl.admin.Execution` for more details
+
+
+
+.. csv-table:: ExecutionList type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "executions", ":ref:`ref_flyteidl.admin.Execution`", "repeated", ""
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionMetadata:
+
+ExecutionMetadata
+------------------------------------------------------------------
+
+Represents attributes about an execution which are not required to launch the execution but are useful to record.
+These attributes are assigned at launch time and do not change.
+
+
+
+.. csv-table:: ExecutionMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "mode", ":ref:`ref_flyteidl.admin.ExecutionMetadata.ExecutionMode`", "", ""
+ "principal", ":ref:`ref_string`", "", "Identifier of the entity that triggered this execution. For systems using back-end authentication any value set here will be discarded in favor of the authenticated user context."
+ "nesting", ":ref:`ref_uint32`", "", "Indicates the nestedness of this execution. If a user launches a workflow execution, the default nesting is 0. If this execution further launches a workflow (child workflow), the nesting level is incremented, e.g. 0 => 1. Generally, if a workflow at nesting level k launches a workflow then the child workflow will have nesting = k + 1."
+ "scheduled_at", ":ref:`ref_google.protobuf.Timestamp`", "", "For scheduled executions, the requested time for execution for this specific schedule invocation."
+ "parent_node_execution", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Which subworkflow node (if any) launched this execution"
+ "reference_execution", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Optional, a reference workflow execution related to this execution. In the case of a relaunch, this references the original workflow execution."
+ "system_metadata", ":ref:`ref_flyteidl.admin.SystemMetadata`", "", "Optional, platform-specific metadata about the execution. In the future this may be gated behind an ACL or some sort of authorization."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionRecoverRequest:
+
+ExecutionRecoverRequest
+------------------------------------------------------------------
+
+Request to recover the referenced execution.
+
+
+
+.. csv-table:: ExecutionRecoverRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifier of the workflow execution to recover."
+ "name", ":ref:`ref_string`", "", "User provided value for the recovered execution. If none is provided the system will generate a unique string. +optional"
+ "metadata", ":ref:`ref_flyteidl.admin.ExecutionMetadata`", "", "Additional metadata which will be used to overwrite any metadata in the reference execution when triggering a recovery execution."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionRelaunchRequest:
+
+ExecutionRelaunchRequest
+------------------------------------------------------------------
+
+Request to relaunch the referenced execution.
+
+
+
+.. csv-table:: ExecutionRelaunchRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifier of the workflow execution to relaunch. +required"
+ "name", ":ref:`ref_string`", "", "User provided value for the relaunched execution. If none is provided the system will generate a unique string. +optional"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionSpec:
+
+ExecutionSpec
+------------------------------------------------------------------
+
+An ExecutionSpec encompasses all data used to launch this execution. The Spec does not change over the lifetime
+of an execution as it progresses across phase changes.
+
+
+
+.. csv-table:: ExecutionSpec type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "launch_plan", ":ref:`ref_flyteidl.core.Identifier`", "", "Launch plan to be executed"
+ "inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Input values to be passed for the execution"
+ "metadata", ":ref:`ref_flyteidl.admin.ExecutionMetadata`", "", "Metadata for the execution"
+ "notifications", ":ref:`ref_flyteidl.admin.NotificationList`", "", "List of notifications based on Execution status transitions When this list is not empty it is used rather than any notifications defined in the referenced launch plan. When this list is empty, the notifications defined for the launch plan will be applied."
+ "disable_all", ":ref:`ref_bool`", "", "This should be set to true if all notifications are intended to be disabled for this execution."
+ "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Labels to apply to the execution resource."
+ "annotations", ":ref:`ref_flyteidl.admin.Annotations`", "", "Annotations to apply to the execution resource."
+ "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "Optional: security context override to apply to this execution."
+ "auth_role", ":ref:`ref_flyteidl.admin.AuthRole`", "", "**Deprecated.** Optional: auth override to apply to this execution."
+ "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "Indicates the runtime priority of the execution."
+ "max_parallelism", ":ref:`ref_int32`", "", "Controls the maximum number of task nodes that can be run in parallel for the entire workflow. This is useful to achieve fairness. Note: MapTasks are regarded as one unit, and parallelism/concurrency of MapTasks is independent from this."
+ "raw_output_data_config", ":ref:`ref_flyteidl.admin.RawOutputDataConfig`", "", "User setting to configure where to store offloaded data (i.e. Blobs, structured datasets, query data, etc.). This should be a prefix like s3://my-bucket/my-data"
+ "cluster_assignment", ":ref:`ref_flyteidl.admin.ClusterAssignment`", "", "Controls how to select an available cluster on which this execution should run."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionStateChangeDetails:
+
+ExecutionStateChangeDetails
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: ExecutionStateChangeDetails type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "state", ":ref:`ref_flyteidl.admin.ExecutionState`", "", "The state of the execution is used to control its visibility in the UI/CLI."
+ "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the state changed."
+ "principal", ":ref:`ref_string`", "", "Identifies the entity (if any) responsible for causing the state change of the execution"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionTerminateRequest:
+
+ExecutionTerminateRequest
+------------------------------------------------------------------
+
+Request to terminate an in-progress execution. This action is irreversible.
+If an execution is already terminated, this request will simply be a no-op.
+This request will fail if it references a non-existent execution.
+If the request succeeds the phase "ABORTED" will be recorded for the termination
+with the optional cause added to the output_result.
+
+
+
+.. csv-table:: ExecutionTerminateRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Uniquely identifies the individual workflow execution to be terminated."
+ "cause", ":ref:`ref_string`", "", "Optional reason for aborting."
+
+
+
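+A short sketch of a termination request (the execution name shown is a placeholder):
+
+.. code-block:: python
+
+    # Sketch only: terminate a running execution and record an optional cause.
+    from flyteidl.admin import execution_pb2
+    from flyteidl.core import identifier_pb2
+
+    request = execution_pb2.ExecutionTerminateRequest(
+        id=identifier_pb2.WorkflowExecutionIdentifier(
+            project="flytesnacks", domain="development", name="f3a1b2c4d"
+        ),
+        cause="Superseded by a newer run",
+    )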
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionTerminateResponse:
+
+ExecutionTerminateResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be populated in the future.
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionUpdateRequest:
+
+ExecutionUpdateRequest
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: ExecutionUpdateRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifier of the execution to update"
+ "state", ":ref:`ref_flyteidl.admin.ExecutionState`", "", "State to set as the new value active/archive"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionUpdateResponse:
+
+ExecutionUpdateResponse
+------------------------------------------------------------------
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.LiteralMapBlob:
+
+LiteralMapBlob
+------------------------------------------------------------------
+
+Input/output data can be represented by actual values or a link to where values are stored
+
+
+
+.. csv-table:: LiteralMapBlob type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "values", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Data in LiteralMap format"
+ "uri", ":ref:`ref_string`", "", "In the event that the map is too large, we return a uri to the data"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NotificationList:
+
+NotificationList
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: NotificationList type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "notifications", ":ref:`ref_flyteidl.admin.Notification`", "repeated", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.SystemMetadata:
+
+SystemMetadata
+------------------------------------------------------------------
+
+Represents system, rather than user-facing, metadata about an execution.
+
+
+
+.. csv-table:: SystemMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "execution_cluster", ":ref:`ref_string`", "", "Which execution cluster this execution ran on."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowExecutionGetDataRequest:
+
+WorkflowExecutionGetDataRequest
+------------------------------------------------------------------
+
+Request structure to fetch inputs, outputs and other data produced by an execution.
+By default this data is not returned inline in :ref:`ref_flyteidl.admin.WorkflowExecutionGetRequest`
+
+
+
+.. csv-table:: WorkflowExecutionGetDataRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "The identifier of the execution for which to fetch inputs and outputs."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowExecutionGetDataResponse:
+
+WorkflowExecutionGetDataResponse
+------------------------------------------------------------------
+
+Response structure for WorkflowExecutionGetDataRequest which contains inputs and outputs for an execution.
+
+
+
+.. csv-table:: WorkflowExecutionGetDataResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "outputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of execution outputs. Deprecated: Please use full_outputs instead."
+ "inputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of execution inputs. Deprecated: Please use full_inputs instead."
+ "full_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_inputs will only be populated if they are under a configured size threshold."
+ "full_outputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_outputs will only be populated if they are under a configured size threshold."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowExecutionGetRequest:
+
+WorkflowExecutionGetRequest
+------------------------------------------------------------------
+
+A message used to fetch a single workflow execution entity.
+See :ref:`ref_flyteidl.admin.Execution` for more details
+
+
+
+.. csv-table:: WorkflowExecutionGetRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Uniquely identifies an individual workflow execution."
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionMetadata.ExecutionMode:
+
+ExecutionMetadata.ExecutionMode
+------------------------------------------------------------------
+
+The method by which this execution was launched.
+
+.. csv-table:: Enum ExecutionMetadata.ExecutionMode values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "MANUAL", "0", "The default execution mode, MANUAL implies that an execution was launched by an individual."
+ "SCHEDULED", "1", "A schedule triggered this execution launch."
+ "SYSTEM", "2", "A system process was responsible for launching this execution rather than an individual."
+ "RELAUNCH", "3", "This execution was launched with identical inputs as a previous execution."
+ "CHILD_WORKFLOW", "4", "This execution was triggered by another execution."
+ "RECOVERED", "5", "This execution was recovered from another execution."
+
+
+
+.. _ref_flyteidl.admin.ExecutionState:
+
+ExecutionState
+------------------------------------------------------------------
+
+The state of the execution is used to control its visibility in the UI/CLI.
+
+.. csv-table:: Enum ExecutionState values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "EXECUTION_ACTIVE", "0", "By default, all executions are considered active."
+ "EXECUTION_ARCHIVED", "1", "Archived executions are no longer visible in the UI."
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/launch_plan.proto:
+
+flyteidl/admin/launch_plan.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.ActiveLaunchPlanListRequest:
+
+ActiveLaunchPlanListRequest
+------------------------------------------------------------------
+
+Represents a request structure to list active launch plans within a project/domain.
+See :ref:`ref_flyteidl.admin.LaunchPlan` for more details
+
+
+
+.. csv-table:: ActiveLaunchPlanListRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", "Name of the project that contains the identifiers. +required."
+ "domain", ":ref:`ref_string`", "", "Name of the domain the identifiers belong to within the project. +required."
+ "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required."
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional"
+ "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ActiveLaunchPlanRequest:
+
+ActiveLaunchPlanRequest
+------------------------------------------------------------------
+
+Represents a request struct for finding an active launch plan for a given NamedEntityIdentifier
+See :ref:`ref_flyteidl.admin.LaunchPlan` for more details
+
+
+
+.. csv-table:: ActiveLaunchPlanRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "+required."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Auth:
+
+Auth
+------------------------------------------------------------------
+
+Defines permissions associated with executions created by this launch plan spec.
+Use either of these roles when it has the permissions required by your workflow execution.
+Deprecated.
+
+
+
+.. csv-table:: Auth type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "assumable_iam_role", ":ref:`ref_string`", "", "Defines an optional iam role which will be used for tasks run in executions created with this launch plan."
+ "kubernetes_service_account", ":ref:`ref_string`", "", "Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.LaunchPlan:
+
+LaunchPlan
+------------------------------------------------------------------
+
+A LaunchPlan provides the capability to templatize workflow executions.
+Launch plans simplify associating one or more schedules, inputs and notifications with your workflows.
+Launch plans can be shared and used to trigger executions with predefined inputs even when a workflow
+definition doesn't necessarily have a default value for said input.
+
+
+
+.. csv-table:: LaunchPlan type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Uniquely identifies a launch plan entity."
+ "spec", ":ref:`ref_flyteidl.admin.LaunchPlanSpec`", "", "User-provided launch plan details, including reference workflow, inputs and other metadata."
+ "closure", ":ref:`ref_flyteidl.admin.LaunchPlanClosure`", "", "Values computed by the flyte platform after launch plan registration."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.LaunchPlanClosure:
+
+LaunchPlanClosure
+------------------------------------------------------------------
+
+Values computed by the flyte platform after launch plan registration.
+These include expected_inputs required to be present in a CreateExecutionRequest
+to launch the reference workflow as well as timestamp values associated with the launch plan.
+
+
+
+.. csv-table:: LaunchPlanClosure type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "state", ":ref:`ref_flyteidl.admin.LaunchPlanState`", "", "Indicate the Launch plan state."
+ "expected_inputs", ":ref:`ref_flyteidl.core.ParameterMap`", "", "Indicates the set of inputs expected when creating an execution with the Launch plan"
+ "expected_outputs", ":ref:`ref_flyteidl.core.VariableMap`", "", "Indicates the set of outputs expected to be produced by creating an execution with the Launch plan"
+ "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the launch plan was created."
+ "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the launch plan was last updated."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.LaunchPlanCreateRequest:
+
+LaunchPlanCreateRequest
+------------------------------------------------------------------
+
+Request to register a launch plan. The included LaunchPlanSpec may have a complete or incomplete set of inputs required
+to launch a workflow execution. By default all launch plans are registered in state INACTIVE. If you wish to
+set the state to ACTIVE, you must submit a LaunchPlanUpdateRequest, after you have successfully created a launch plan.
+
+
+
+.. csv-table:: LaunchPlanCreateRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Uniquely identifies a launch plan entity."
+ "spec", ":ref:`ref_flyteidl.admin.LaunchPlanSpec`", "", "User-provided launch plan details, including reference workflow, inputs and other metadata."
+
+
+
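+Because launch plans start out INACTIVE, registration is typically followed by an update to
+ACTIVE; a rough sketch (the identifier values are placeholders and the spec is left empty):
+
+.. code-block:: python
+
+    # Sketch only: register a launch plan, then activate it in a second request.
+    from flyteidl.admin import launch_plan_pb2
+    from flyteidl.core import identifier_pb2
+
+    lp_id = identifier_pb2.Identifier(
+        resource_type=identifier_pb2.LAUNCH_PLAN,
+        project="flytesnacks",
+        domain="development",
+        name="my_launch_plan",
+        version="v1",
+    )
+
+    create = launch_plan_pb2.LaunchPlanCreateRequest(
+        id=lp_id,
+        spec=launch_plan_pb2.LaunchPlanSpec(),  # workflow_id, inputs, etc. go here
+    )
+    activate = launch_plan_pb2.LaunchPlanUpdateRequest(
+        id=lp_id,
+        state=launch_plan_pb2.ACTIVE,
+    )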
+
+
+
+
+.. _ref_flyteidl.admin.LaunchPlanCreateResponse:
+
+LaunchPlanCreateResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be populated in the future.
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.LaunchPlanList:
+
+LaunchPlanList
+------------------------------------------------------------------
+
+Response object for list launch plan requests.
+See :ref:`ref_flyteidl.admin.LaunchPlan` for more details
+
+
+
+.. csv-table:: LaunchPlanList type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "launch_plans", ":ref:`ref_flyteidl.admin.LaunchPlan`", "repeated", ""
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.LaunchPlanMetadata:
+
+LaunchPlanMetadata
+------------------------------------------------------------------
+
+Additional launch plan attributes included in the LaunchPlanSpec not strictly required to launch
+the reference workflow.
+
+
+
+.. csv-table:: LaunchPlanMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "schedule", ":ref:`ref_flyteidl.admin.Schedule`", "", "Schedule to execute the Launch Plan"
+ "notifications", ":ref:`ref_flyteidl.admin.Notification`", "repeated", "List of notifications based on Execution status transitions"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.LaunchPlanSpec:
+
+LaunchPlanSpec
+------------------------------------------------------------------
+
+User-provided launch plan definition and configuration values.
+
+
+
+.. csv-table:: LaunchPlanSpec type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "workflow_id", ":ref:`ref_flyteidl.core.Identifier`", "", "Reference to the Workflow template that the launch plan references"
+ "entity_metadata", ":ref:`ref_flyteidl.admin.LaunchPlanMetadata`", "", "Metadata for the Launch Plan"
+ "default_inputs", ":ref:`ref_flyteidl.core.ParameterMap`", "", "Input values to be passed for the execution. These can be overridden when an execution is created with this launch plan."
+ "fixed_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Fixed, non-overridable inputs for the Launch Plan. These can not be overridden when an execution is created with this launch plan."
+ "role", ":ref:`ref_string`", "", "**Deprecated.** String to indicate the role to use to execute the workflow underneath"
+ "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Custom labels to be applied to the execution resource."
+ "annotations", ":ref:`ref_flyteidl.admin.Annotations`", "", "Custom annotations to be applied to the execution resource."
+ "auth", ":ref:`ref_flyteidl.admin.Auth`", "", "**Deprecated.** Indicates the permission associated with workflow executions triggered with this launch plan."
+ "auth_role", ":ref:`ref_flyteidl.admin.AuthRole`", "", "**Deprecated.** "
+ "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "Indicates security context for permissions triggered with this launch plan"
+ "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "Indicates the runtime priority of the execution."
+ "raw_output_data_config", ":ref:`ref_flyteidl.admin.RawOutputDataConfig`", "", "Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.)."
+ "max_parallelism", ":ref:`ref_int32`", "", "Controls the maximum number of task nodes that can be run in parallel for the entire workflow. This is useful to achieve fairness. Note: MapTasks are regarded as one unit, and parallelism/concurrency of MapTasks is independent from this."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.LaunchPlanUpdateRequest:
+
+LaunchPlanUpdateRequest
+------------------------------------------------------------------
+
+Request to set the referenced launch plan state to the configured value.
+See :ref:`ref_flyteidl.admin.LaunchPlan` for more details
+
+
+
+.. csv-table:: LaunchPlanUpdateRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Identifier of launch plan for which to change state. +required."
+ "state", ":ref:`ref_flyteidl.admin.LaunchPlanState`", "", "Desired state to apply to the launch plan. +required."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.LaunchPlanUpdateResponse:
+
+LaunchPlanUpdateResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be populated in the future.
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.LaunchPlanState:
+
+LaunchPlanState
+------------------------------------------------------------------
+
+By default any launch plan regardless of state can be used to launch a workflow execution.
+However, at most one version of a launch plan
+(e.g. a NamedEntityIdentifier set of shared project, domain and name values) can be
+active at a time with regard to *schedules*. That is, at most one schedule in a NamedEntityIdentifier
+group will be observed and trigger executions at a defined cadence.
+
+.. csv-table:: Enum LaunchPlanState values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "INACTIVE", "0", ""
+ "ACTIVE", "1", ""
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/matchable_resource.proto:
+
+flyteidl/admin/matchable_resource.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.ClusterResourceAttributes:
+
+ClusterResourceAttributes
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: ClusterResourceAttributes type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "attributes", ":ref:`ref_flyteidl.admin.ClusterResourceAttributes.AttributesEntry`", "repeated", "Custom resource attributes which will be applied in cluster resource creation (e.g. quotas). Map keys are the *case-sensitive* names of variables in templatized resource files. Map values should be the custom values which get substituted during resource creation."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ClusterResourceAttributes.AttributesEntry:
+
+ClusterResourceAttributes.AttributesEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: ClusterResourceAttributes.AttributesEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionClusterLabel:
+
+ExecutionClusterLabel
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: ExecutionClusterLabel type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "value", ":ref:`ref_string`", "", "Label value to determine where the execution will be run"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ExecutionQueueAttributes:
+
+ExecutionQueueAttributes
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: ExecutionQueueAttributes type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "tags", ":ref:`ref_string`", "repeated", "Tags used for assigning execution queues for tasks defined within this project."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ListMatchableAttributesRequest:
+
+ListMatchableAttributesRequest
+------------------------------------------------------------------
+
+Request all matching resource attributes for a resource type.
+See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details
+
+
+
+.. csv-table:: ListMatchableAttributesRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "+required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ListMatchableAttributesResponse:
+
+ListMatchableAttributesResponse
+------------------------------------------------------------------
+
+Response for a request for all matching resource attributes for a resource type.
+See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details
+
+
+
+.. csv-table:: ListMatchableAttributesResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "configurations", ":ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`", "repeated", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.MatchableAttributesConfiguration:
+
+MatchableAttributesConfiguration
+------------------------------------------------------------------
+
+Represents a custom set of attributes applied for either a domain; a domain and project; or
+domain, project and workflow name.
+These are used to override system level defaults for kubernetes cluster resource management,
+default execution values, and more, across different levels of specificity.
+
+
+
+.. csv-table:: MatchableAttributesConfiguration type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", ""
+ "domain", ":ref:`ref_string`", "", ""
+ "project", ":ref:`ref_string`", "", ""
+ "workflow", ":ref:`ref_string`", "", ""
+ "launch_plan", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.MatchingAttributes:
+
+MatchingAttributes
+------------------------------------------------------------------
+
+Generic container that encapsulates any one of the above attribute message types.
+
+
+
+.. csv-table:: MatchingAttributes type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "task_resource_attributes", ":ref:`ref_flyteidl.admin.TaskResourceAttributes`", "", ""
+ "cluster_resource_attributes", ":ref:`ref_flyteidl.admin.ClusterResourceAttributes`", "", ""
+ "execution_queue_attributes", ":ref:`ref_flyteidl.admin.ExecutionQueueAttributes`", "", ""
+ "execution_cluster_label", ":ref:`ref_flyteidl.admin.ExecutionClusterLabel`", "", ""
+ "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", ""
+ "plugin_overrides", ":ref:`ref_flyteidl.admin.PluginOverrides`", "", ""
+ "workflow_execution_config", ":ref:`ref_flyteidl.admin.WorkflowExecutionConfig`", "", ""
+ "cluster_assignment", ":ref:`ref_flyteidl.admin.ClusterAssignment`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.PluginOverride:
+
+PluginOverride
+------------------------------------------------------------------
+
+This MatchableAttribute configures the selection of alternate plugin implementations for a given task type.
+In addition to an override implementation, a list of fallbacks can be provided, along with behavior
+for handling cases where the desired plugin override is not enabled in a given Flyte deployment.
+
+
+
+.. csv-table:: PluginOverride type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "task_type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier."
+ "plugin_id", ":ref:`ref_string`", "repeated", "A set of plugin ids which should handle tasks of this type instead of the default registered plugin. The list will be tried in order until a plugin is found with that id."
+ "missing_plugin_behavior", ":ref:`ref_flyteidl.admin.PluginOverride.MissingPluginBehavior`", "", "Defines the behavior when no plugin from the plugin_id list is not found."
+
+
+
+
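+As an illustrative sketch (assuming the ``flyteidl.admin.matchable_resource_pb2`` Python bindings;
+the task type and plugin ids below are placeholders), a plugin override with a fallback behavior
+might be constructed as follows:
+
+.. code-block:: python
+
+   from flyteidl.admin import matchable_resource_pb2
+
+   override = matchable_resource_pb2.PluginOverride(
+       task_type="spark",                        # placeholder task type
+       plugin_id=["spark-on-k8s", "container"],  # tried in order
+       # Fall back to the system default plugin if none of the ids are enabled.
+       missing_plugin_behavior=matchable_resource_pb2.PluginOverride.USE_DEFAULT,
+   )
+   overrides = matchable_resource_pb2.PluginOverrides(overrides=[override])
+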
+
+
+
+.. _ref_flyteidl.admin.PluginOverrides:
+
+PluginOverrides
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: PluginOverrides type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "overrides", ":ref:`ref_flyteidl.admin.PluginOverride`", "repeated", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskResourceAttributes:
+
+TaskResourceAttributes
+------------------------------------------------------------------
+
+Defines task resource defaults and limits that will be applied at task registration.
+
+
+
+.. csv-table:: TaskResourceAttributes type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "defaults", ":ref:`ref_flyteidl.admin.TaskResourceSpec`", "", ""
+ "limits", ":ref:`ref_flyteidl.admin.TaskResourceSpec`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskResourceSpec:
+
+TaskResourceSpec
+------------------------------------------------------------------
+
+Defines a set of overridable task resource attributes set during task registration.
+
+
+
+.. csv-table:: TaskResourceSpec type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "cpu", ":ref:`ref_string`", "", ""
+ "gpu", ":ref:`ref_string`", "", ""
+ "memory", ":ref:`ref_string`", "", ""
+ "storage", ":ref:`ref_string`", "", ""
+ "ephemeral_storage", ":ref:`ref_string`", "", ""
+
+
+
+
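+For example, a sketch of defaults and limits (assuming the ``flyteidl.admin.matchable_resource_pb2``
+bindings; the quantities are placeholders expressed in Kubernetes resource notation):
+
+.. code-block:: python
+
+   from flyteidl.admin import matchable_resource_pb2
+
+   task_resources = matchable_resource_pb2.TaskResourceAttributes(
+       defaults=matchable_resource_pb2.TaskResourceSpec(cpu="500m", memory="1Gi"),
+       limits=matchable_resource_pb2.TaskResourceSpec(cpu="2", memory="4Gi"),
+   )
+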
+
+
+
+.. _ref_flyteidl.admin.WorkflowExecutionConfig:
+
+WorkflowExecutionConfig
+------------------------------------------------------------------
+
+Adds defaults for customizable workflow-execution specifications and overrides.
+
+
+
+.. csv-table:: WorkflowExecutionConfig type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "max_parallelism", ":ref:`ref_int32`", "", "Can be used to control the number of parallel nodes to run within the workflow. This is useful to achieve fairness."
+ "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "Indicates security context permissions for executions triggered with this matchable attribute."
+ "raw_output_data_config", ":ref:`ref_flyteidl.admin.RawOutputDataConfig`", "", "Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.)."
+ "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Custom labels to be applied to a triggered execution resource."
+ "annotations", ":ref:`ref_flyteidl.admin.Annotations`", "", "Custom annotations to be applied to a triggered execution resource."
+
+
+
+
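+A hedged sketch of such a configuration (assuming the ``flyteidl.admin`` Python bindings;
+the label and annotation keys are placeholders):
+
+.. code-block:: python
+
+   from flyteidl.admin import common_pb2, matchable_resource_pb2
+
+   wf_exec_config = matchable_resource_pb2.WorkflowExecutionConfig(
+       max_parallelism=10,  # at most 10 nodes run concurrently
+       labels=common_pb2.Labels(values={"team": "data-platform"}),
+       annotations=common_pb2.Annotations(values={"owner": "example@example.com"}),
+   )
+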
+
+
+
+
+
+.. _ref_flyteidl.admin.MatchableResource:
+
+MatchableResource
+------------------------------------------------------------------
+
+Defines a resource that can be configured by customizable Project-, ProjectDomain- or WorkflowAttributes
+based on matching tags.
+
+.. csv-table:: Enum MatchableResource values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "TASK_RESOURCE", "0", "Applies to customizable task resource requests and limits."
+ "CLUSTER_RESOURCE", "1", "Applies to configuring templated kubernetes cluster resources."
+ "EXECUTION_QUEUE", "2", "Configures task and dynamic task execution queue assignment."
+ "EXECUTION_CLUSTER_LABEL", "3", "Configures the K8s cluster label to be used for execution to be run"
+ "QUALITY_OF_SERVICE_SPECIFICATION", "4", "Configures default quality of service when undefined in an execution spec."
+ "PLUGIN_OVERRIDE", "5", "Selects configurable plugin implementation behavior for a given task type."
+ "WORKFLOW_EXECUTION_CONFIG", "6", "Adds defaults for customizable workflow-execution specifications and overrides."
+ "CLUSTER_ASSIGNMENT", "7", "Controls how to select an available cluster on which this execution should run."
+
+
+
+.. _ref_flyteidl.admin.PluginOverride.MissingPluginBehavior:
+
+PluginOverride.MissingPluginBehavior
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum PluginOverride.MissingPluginBehavior values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "FAIL", "0", "By default, if this plugin is not enabled for a Flyte deployment then execution will fail."
+ "USE_DEFAULT", "1", "Uses the system-configured default implementation."
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/node_execution.proto:
+
+flyteidl/admin/node_execution.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.DynamicWorkflowNodeMetadata:
+
+DynamicWorkflowNodeMetadata
+------------------------------------------------------------------
+
+For dynamic workflow nodes we capture information about the dynamic workflow definition that gets generated.
+
+
+
+.. csv-table:: DynamicWorkflowNodeMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow."
+ "compiled_workflow", ":ref:`ref_flyteidl.core.CompiledWorkflowClosure`", "", "Represents the compiled representation of the embedded dynamic workflow."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NodeExecution:
+
+NodeExecution
+------------------------------------------------------------------
+
+Encapsulates all details for a single node execution entity.
+A node represents a component in the overall workflow graph. A node can launch a task, multiple tasks, an entire nested
+sub-workflow, or even a separate child-workflow execution.
+The same task can be called repeatedly in a single workflow but each node is unique.
+
+
+
+.. csv-table:: NodeExecution type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Uniquely identifies an individual node execution."
+ "input_uri", ":ref:`ref_string`", "", "Path to remote data store where input blob is stored."
+ "closure", ":ref:`ref_flyteidl.admin.NodeExecutionClosure`", "", "Computed results associated with this node execution."
+ "metadata", ":ref:`ref_flyteidl.admin.NodeExecutionMetaData`", "", "Metadata for Node Execution"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NodeExecutionClosure:
+
+NodeExecutionClosure
+------------------------------------------------------------------
+
+Container for node execution details and results.
+
+
+
+.. csv-table:: NodeExecutionClosure type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "output_uri", ":ref:`ref_string`", "", "**Deprecated.** Links to a remotely stored, serialized core.LiteralMap of node execution outputs. DEPRECATED. Use GetNodeExecutionData to fetch output data instead."
+ "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the Node"
+ "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Raw output data produced by this node execution. DEPRECATED. Use GetNodeExecutionData to fetch output data instead."
+ "phase", ":ref:`ref_flyteidl.core.NodeExecution.Phase`", "", "The last recorded phase for this node execution."
+ "started_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the node execution began running."
+ "duration", ":ref:`ref_google.protobuf.Duration`", "", "The amount of time the node execution spent running."
+ "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the node execution was created."
+ "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the node execution was last updated."
+ "workflow_node_metadata", ":ref:`ref_flyteidl.admin.WorkflowNodeMetadata`", "", ""
+ "task_node_metadata", ":ref:`ref_flyteidl.admin.TaskNodeMetadata`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NodeExecutionForTaskListRequest:
+
+NodeExecutionForTaskListRequest
+------------------------------------------------------------------
+
+Represents a request structure to retrieve a list of node execution entities launched by a specific task.
+This can arise when a task yields a subworkflow.
+
+
+
+.. csv-table:: NodeExecutionForTaskListRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "task_execution_id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Indicates the node execution to filter by. +required"
+ "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required"
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the, server-provided token can be used to fetch the next page in a query. +optional"
+ "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional"
+ "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NodeExecutionGetDataRequest:
+
+NodeExecutionGetDataRequest
+------------------------------------------------------------------
+
+Request structure to fetch inputs and output for a node execution.
+By default, these are not returned in :ref:`ref_flyteidl.admin.NodeExecutionGetRequest`
+
+
+
+.. csv-table:: NodeExecutionGetDataRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "The identifier of the node execution for which to fetch inputs and outputs."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NodeExecutionGetDataResponse:
+
+NodeExecutionGetDataResponse
+------------------------------------------------------------------
+
+Response structure for NodeExecutionGetDataRequest which contains inputs and outputs for a node execution.
+
+
+
+.. csv-table:: NodeExecutionGetDataResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "inputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of node execution inputs. Deprecated: Please use full_inputs instead."
+ "outputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of node execution outputs. Deprecated: Please use full_outputs instead."
+ "full_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_inputs will only be populated if they are under a configured size threshold."
+ "full_outputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_outputs will only be populated if they are under a configured size threshold."
+ "dynamic_workflow", ":ref:`ref_flyteidl.admin.DynamicWorkflowNodeMetadata`", "", "Optional Workflow closure for a dynamically generated workflow, in the case this node yields a dynamic workflow we return its structure here."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NodeExecutionGetRequest:
+
+NodeExecutionGetRequest
+------------------------------------------------------------------
+
+A message used to fetch a single node execution entity.
+See :ref:`ref_flyteidl.admin.NodeExecution` for more details
+
+
+
+.. csv-table:: NodeExecutionGetRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Uniquely identifies an individual node execution. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NodeExecutionList:
+
+NodeExecutionList
+------------------------------------------------------------------
+
+Request structure to retrieve a list of node execution entities.
+See :ref:`ref_flyteidl.admin.NodeExecution` for more details
+
+
+
+.. csv-table:: NodeExecutionList type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "node_executions", ":ref:`ref_flyteidl.admin.NodeExecution`", "repeated", ""
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.NodeExecutionListRequest:
+
+NodeExecutionListRequest
+------------------------------------------------------------------
+
+Represents a request structure to retrieve a list of node execution entities.
+See :ref:`ref_flyteidl.admin.NodeExecution` for more details
+
+
+
+.. csv-table:: NodeExecutionListRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "workflow_execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Indicates the workflow execution to filter by. +required"
+ "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required"
+ "token", ":ref:`ref_string`", "", ""
+ "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional"
+ "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional"
+ "unique_parent_id", ":ref:`ref_string`", "", "Unique identifier of the parent node in the execution +optional"
+
+
+
+
+
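+The sketch below shows how the ``limit`` and ``token`` fields are intended to be used for pagination.
+It assumes the ``flyteidl`` Python bindings; ``list_node_executions`` is a hypothetical callable
+standing in for the Admin service's ListNodeExecutions RPC, and the identifier values are placeholders.
+
+.. code-block:: python
+
+   from flyteidl.admin import node_execution_pb2
+   from flyteidl.core import identifier_pb2
+
+   def iterate_node_executions(list_node_executions, execution_id):
+       """Yield every node execution by following the server-provided token."""
+       token = ""
+       while True:
+           response = list_node_executions(
+               node_execution_pb2.NodeExecutionListRequest(
+                   workflow_execution_id=execution_id,
+                   limit=100,
+                   token=token,
+               )
+           )
+           yield from response.node_executions
+           token = response.token
+           if not token:  # an empty token means there are no further pages
+               break
+
+   # Placeholder identifier, for illustration only.
+   execution_id = identifier_pb2.WorkflowExecutionIdentifier(
+       project="flyteexamples", domain="development", name="abc123"
+   )
+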
+
+
+.. _ref_flyteidl.admin.NodeExecutionMetaData:
+
+NodeExecutionMetaData
+------------------------------------------------------------------
+
+Represents additional attributes related to a Node Execution
+
+
+
+.. csv-table:: NodeExecutionMetaData type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "retry_group", ":ref:`ref_string`", "", "Node executions are grouped depending on retries of the parent Retry group is unique within the context of a parent node."
+ "is_parent_node", ":ref:`ref_bool`", "", "Boolean flag indicating if the node has child nodes under it This can be true when a node contains a dynamic workflow which then produces child nodes."
+ "spec_node_id", ":ref:`ref_string`", "", "Node id of the node in the original workflow This maps to value of WorkflowTemplate.nodes[X].id"
+ "is_dynamic", ":ref:`ref_bool`", "", "Boolean flag indicating if the node has contains a dynamic workflow which then produces child nodes. This is to distinguish between subworkflows and dynamic workflows which can both have is_parent_node as true."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskNodeMetadata:
+
+TaskNodeMetadata
+------------------------------------------------------------------
+
+Metadata for the case in which the node is a TaskNode
+
+
+
+.. csv-table:: TaskNodeMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "cache_status", ":ref:`ref_flyteidl.core.CatalogCacheStatus`", "", "Captures the status of caching for this execution."
+ "catalog_key", ":ref:`ref_flyteidl.core.CatalogMetadata`", "", "This structure carries the catalog artifact information"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowNodeMetadata:
+
+WorkflowNodeMetadata
+------------------------------------------------------------------
+
+Metadata for a WorkflowNode
+
+
+
+.. csv-table:: WorkflowNodeMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "executionId", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "The identifier for a workflow execution launched by a node."
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/notification.proto:
+
+flyteidl/admin/notification.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.EmailMessage:
+
+EmailMessage
+------------------------------------------------------------------
+
+Represents the Email object that is sent to a publisher/subscriber
+to forward the notification.
+Note: This is internal to Admin and doesn't need to be exposed to other components.
+
+
+
+.. csv-table:: EmailMessage type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "recipients_email", ":ref:`ref_string`", "repeated", "The list of email addresses to receive an email with the content populated in the other fields. Currently, each email recipient will receive its own email. This populates the TO field."
+ "sender_email", ":ref:`ref_string`", "", "The email of the sender. This populates the FROM field."
+ "subject_line", ":ref:`ref_string`", "", "The content of the subject line. This populates the SUBJECT field."
+ "body", ":ref:`ref_string`", "", "The content of the email body. This populates the BODY field."
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/project.proto:
+
+flyteidl/admin/project.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.Domain:
+
+Domain
+------------------------------------------------------------------
+
+Namespace within a project commonly used to differentiate between different service instances.
+e.g. "production", "development", etc.
+
+
+
+.. csv-table:: Domain type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_string`", "", "Globally unique domain name."
+ "name", ":ref:`ref_string`", "", "Display name."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Project:
+
+Project
+------------------------------------------------------------------
+
+Top-level namespace used to classify different entities like workflows and executions.
+
+
+
+.. csv-table:: Project type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_string`", "", "Globally unique project name."
+ "name", ":ref:`ref_string`", "", "Display name."
+ "domains", ":ref:`ref_flyteidl.admin.Domain`", "repeated", ""
+ "description", ":ref:`ref_string`", "", ""
+ "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Leverage Labels from flyteidel.admin.common.proto to tag projects with ownership information."
+ "state", ":ref:`ref_flyteidl.admin.Project.ProjectState`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ProjectListRequest:
+
+ProjectListRequest
+------------------------------------------------------------------
+
+Request to retrieve a list of projects matching specified filters.
+See :ref:`ref_flyteidl.admin.Project` for more details
+
+
+
+.. csv-table:: ProjectListRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "limit", ":ref:`ref_uint32`", "", "Indicates the number of projects to be returned. +required"
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional"
+ "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional"
+ "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ProjectRegisterRequest:
+
+ProjectRegisterRequest
+------------------------------------------------------------------
+
+Adds a new user-project within the Flyte deployment.
+See :ref:`ref_flyteidl.admin.Project` for more details
+
+
+
+.. csv-table:: ProjectRegisterRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_flyteidl.admin.Project`", "", "+required"
+
+
+
+
+
+
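+A minimal sketch of registering a project (assuming the ``flyteidl.admin.project_pb2`` bindings;
+the project id, name and description are placeholders):
+
+.. code-block:: python
+
+   from flyteidl.admin import project_pb2
+
+   request = project_pb2.ProjectRegisterRequest(
+       project=project_pb2.Project(
+           id="flyteexamples",
+           name="Flyte Examples",
+           description="Example workflows",
+       )
+   )
+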
+
+.. _ref_flyteidl.admin.ProjectRegisterResponse:
+
+ProjectRegisterResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be updated in the future.
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ProjectUpdateResponse:
+
+ProjectUpdateResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be updated in the future.
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Projects:
+
+Projects
+------------------------------------------------------------------
+
+Represents a list of projects.
+See :ref:`ref_flyteidl.admin.Project` for more details
+
+
+
+.. csv-table:: Projects type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "projects", ":ref:`ref_flyteidl.admin.Project`", "repeated", ""
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty."
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Project.ProjectState:
+
+Project.ProjectState
+------------------------------------------------------------------
+
+The state of the project is used to control its visibility in the UI and validity.
+
+.. csv-table:: Enum Project.ProjectState values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "ACTIVE", "0", "By default, all projects are considered active."
+ "ARCHIVED", "1", "Archived projects are no longer visible in the UI and no longer valid."
+ "SYSTEM_GENERATED", "2", "System generated projects that aren't explicitly created or managed by a user."
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/project_domain_attributes.proto:
+
+flyteidl/admin/project_domain_attributes.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.ProjectDomainAttributes:
+
+ProjectDomainAttributes
+------------------------------------------------------------------
+
+Defines a set of custom matching attributes which defines resource defaults for a project and domain.
+For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`
+
+
+
+.. csv-table:: ProjectDomainAttributes type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", "Unique project id for which this set of attributes will be applied."
+ "domain", ":ref:`ref_string`", "", "Unique domain id for which this set of attributes will be applied."
+ "matching_attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ProjectDomainAttributesDeleteRequest:
+
+ProjectDomainAttributesDeleteRequest
+------------------------------------------------------------------
+
+Request to delete a set of matchable project domain attribute overrides.
+For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`
+
+
+
+.. csv-table:: ProjectDomainAttributesDeleteRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required"
+ "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required"
+ "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to delete. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ProjectDomainAttributesDeleteResponse:
+
+ProjectDomainAttributesDeleteResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be populated in the future.
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ProjectDomainAttributesGetRequest:
+
+ProjectDomainAttributesGetRequest
+------------------------------------------------------------------
+
+Request to get an individual project domain attribute override.
+For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`
+
+
+
+.. csv-table:: ProjectDomainAttributesGetRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required"
+ "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required"
+ "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to return. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ProjectDomainAttributesGetResponse:
+
+ProjectDomainAttributesGetResponse
+------------------------------------------------------------------
+
+Response to get an individual project domain attribute override.
+For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`
+
+
+
+.. csv-table:: ProjectDomainAttributesGetResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "attributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributes`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.ProjectDomainAttributesUpdateRequest:
+
+ProjectDomainAttributesUpdateRequest
+------------------------------------------------------------------
+
+Sets custom attributes for a project-domain combination.
+For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`
+
+
+
+.. csv-table:: ProjectDomainAttributesUpdateRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "attributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributes`", "", "+required"
+
+
+
+
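+The sketch below shows how the pieces fit together: a ProjectDomainAttributes message wraps a
+MatchingAttributes container, which in turn carries one of the attribute types defined in
+matchable_resource.proto. It assumes the ``flyteidl.admin`` Python bindings and uses placeholder
+project/domain ids and resource values.
+
+.. code-block:: python
+
+   from flyteidl.admin import matchable_resource_pb2, project_domain_attributes_pb2
+
+   request = project_domain_attributes_pb2.ProjectDomainAttributesUpdateRequest(
+       attributes=project_domain_attributes_pb2.ProjectDomainAttributes(
+           project="flyteexamples",   # placeholder project id
+           domain="development",      # placeholder domain id
+           matching_attributes=matchable_resource_pb2.MatchingAttributes(
+               task_resource_attributes=matchable_resource_pb2.TaskResourceAttributes(
+                   defaults=matchable_resource_pb2.TaskResourceSpec(cpu="1", memory="2Gi"),
+               )
+           ),
+       )
+   )
+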
+
+
+
+.. _ref_flyteidl.admin.ProjectDomainAttributesUpdateResponse:
+
+ProjectDomainAttributesUpdateResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be populated in the future.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/schedule.proto:
+
+flyteidl/admin/schedule.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.CronSchedule:
+
+CronSchedule
+------------------------------------------------------------------
+
+Options for schedules to run according to a cron expression.
+
+
+
+.. csv-table:: CronSchedule type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "schedule", ":ref:`ref_string`", "", "Standard/default cron implementation as described by https://en.wikipedia.org/wiki/Cron#CRON_expression; Also supports nonstandard predefined scheduling definitions as described by https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html#CronExpressions except @reboot"
+ "offset", ":ref:`ref_string`", "", "ISO 8601 duration as described by https://en.wikipedia.org/wiki/ISO_8601#Durations"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.FixedRate:
+
+FixedRate
+------------------------------------------------------------------
+
+Option for schedules that run at a certain frequency, e.g. every 2 minutes.
+
+
+
+.. csv-table:: FixedRate type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "value", ":ref:`ref_uint32`", "", ""
+ "unit", ":ref:`ref_flyteidl.admin.FixedRateUnit`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Schedule:
+
+Schedule
+------------------------------------------------------------------
+
+Defines complete set of information required to trigger an execution on a schedule.
+
+
+
+.. csv-table:: Schedule type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "cron_expression", ":ref:`ref_string`", "", "**Deprecated.** Uses AWS syntax: Minutes Hours Day-of-month Month Day-of-week Year e.g. for a schedule that runs every 15 minutes: 0/15 * * * ? *"
+ "rate", ":ref:`ref_flyteidl.admin.FixedRate`", "", ""
+ "cron_schedule", ":ref:`ref_flyteidl.admin.CronSchedule`", "", ""
+ "kickoff_time_input_arg", ":ref:`ref_string`", "", "Name of the input variable that the kickoff time will be supplied to when the workflow is kicked off."
+
+
+
+
+
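+For example, hedged sketches of a cron-based and a fixed-rate schedule (assuming the
+``flyteidl.admin.schedule_pb2`` bindings; the input variable name is a placeholder):
+
+.. code-block:: python
+
+   from flyteidl.admin import schedule_pb2
+
+   # Fires at the top of every hour; "kickoff_time" must name an input of the
+   # scheduled workflow (placeholder name used here).
+   hourly = schedule_pb2.Schedule(
+       cron_schedule=schedule_pb2.CronSchedule(schedule="0 * * * *"),
+       kickoff_time_input_arg="kickoff_time",
+   )
+
+   # Fires every 10 minutes.
+   every_ten_minutes = schedule_pb2.Schedule(
+       rate=schedule_pb2.FixedRate(value=10, unit=schedule_pb2.MINUTE),
+   )
+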
+
+
+
+
+.. _ref_flyteidl.admin.FixedRateUnit:
+
+FixedRateUnit
+------------------------------------------------------------------
+
+Represents a frequency at which to run a schedule.
+
+.. csv-table:: Enum FixedRateUnit values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "MINUTE", "0", ""
+ "HOUR", "1", ""
+ "DAY", "2", ""
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/task.proto:
+
+flyteidl/admin/task.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.Task:
+
+Task
+------------------------------------------------------------------
+
+Flyte workflows are composed of many ordered tasks; that is, small, reusable, self-contained logical blocks
+arranged to process workflow inputs and produce a deterministic set of outputs.
+Tasks can come in many varieties tuned for specialized behavior.
+
+
+
+.. csv-table:: Task type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the task."
+ "closure", ":ref:`ref_flyteidl.admin.TaskClosure`", "", "closure encapsulates all the fields that maps to a compiled version of the task."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskClosure:
+
+TaskClosure
+------------------------------------------------------------------
+
+Computed task attributes which include values derived from the TaskSpec, as well as plugin-specific data
+and task metadata.
+
+
+
+.. csv-table:: TaskClosure type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "compiled_task", ":ref:`ref_flyteidl.core.CompiledTask`", "", "Represents the compiled representation of the task from the specification provided."
+ "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task was created."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskCreateRequest:
+
+TaskCreateRequest
+------------------------------------------------------------------
+
+Represents a request structure to create a revision of a task.
+See :ref:`ref_flyteidl.admin.Task` for more details
+
+
+
+.. csv-table:: TaskCreateRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the task. +required"
+ "spec", ":ref:`ref_flyteidl.admin.TaskSpec`", "", "Represents the specification for task. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskCreateResponse:
+
+TaskCreateResponse
+------------------------------------------------------------------
+
+Represents a response structure if task creation succeeds.
+
+Purposefully empty, may be populated in the future.
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskList:
+
+TaskList
+------------------------------------------------------------------
+
+Represents a list of tasks returned from the admin.
+See :ref:`ref_flyteidl.admin.Task` for more details
+
+
+
+.. csv-table:: TaskList type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "tasks", ":ref:`ref_flyteidl.admin.Task`", "repeated", "A list of tasks returned based on the request."
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskSpec:
+
+TaskSpec
+------------------------------------------------------------------
+
+Represents a structure that encapsulates the user-configured specification of the task.
+
+
+
+.. csv-table:: TaskSpec type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "template", ":ref:`ref_flyteidl.core.TaskTemplate`", "", "Template of the task that encapsulates all the metadata of the task."
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/task_execution.proto:
+
+flyteidl/admin/task_execution.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskExecution:
+
+TaskExecution
+------------------------------------------------------------------
+
+Encapsulates all details for a single task execution entity.
+A task execution represents an instantiated task, including all inputs and additional
+metadata as well as computed results including state, outputs, and duration-based attributes.
+
+
+
+.. csv-table:: TaskExecution type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Unique identifier for the task execution."
+ "input_uri", ":ref:`ref_string`", "", "Path to remote data store where input blob is stored."
+ "closure", ":ref:`ref_flyteidl.admin.TaskExecutionClosure`", "", "Task execution details and results."
+ "is_parent", ":ref:`ref_bool`", "", "Whether this task spawned nodes."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskExecutionClosure:
+
+TaskExecutionClosure
+------------------------------------------------------------------
+
+Container for task execution details and results.
+
+
+
+.. csv-table:: TaskExecutionClosure type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "output_uri", ":ref:`ref_string`", "", "**Deprecated.** Path to remote data store where output blob is stored if the execution succeeded (and produced outputs). DEPRECATED. Use GetTaskExecutionData to fetch output data instead."
+ "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the task execution. Populated if the execution failed."
+ "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Raw output data produced by this task execution. DEPRECATED. Use GetTaskExecutionData to fetch output data instead."
+ "phase", ":ref:`ref_flyteidl.core.TaskExecution.Phase`", "", "The last recorded phase for this task execution."
+ "logs", ":ref:`ref_flyteidl.core.TaskLog`", "repeated", "Detailed log information output by the task execution."
+ "started_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task execution began running."
+ "duration", ":ref:`ref_google.protobuf.Duration`", "", "The amount of time the task execution spent running."
+ "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task execution was created."
+ "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task execution was last updated."
+ "custom_info", ":ref:`ref_google.protobuf.Struct`", "", "Custom data specific to the task plugin."
+ "reason", ":ref:`ref_string`", "", "If there is an explanation for the most recent phase transition, the reason will capture it."
+ "task_type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier."
+ "metadata", ":ref:`ref_flyteidl.event.TaskExecutionMetadata`", "", "Metadata around how a task was executed."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskExecutionGetDataRequest:
+
+TaskExecutionGetDataRequest
+------------------------------------------------------------------
+
+Request structure to fetch inputs and output for a task execution.
+By default this data is not returned inline in :ref:`ref_flyteidl.admin.TaskExecutionGetRequest`
+
+
+
+.. csv-table:: TaskExecutionGetDataRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "The identifier of the task execution for which to fetch inputs and outputs. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskExecutionGetDataResponse:
+
+TaskExecutionGetDataResponse
+------------------------------------------------------------------
+
+Response structure for TaskExecutionGetDataRequest which contains inputs and outputs for a task execution.
+
+
+
+.. csv-table:: TaskExecutionGetDataResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "inputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of task execution inputs. Deprecated: Please use full_inputs instead."
+ "outputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of task execution outputs. Deprecated: Please use full_outputs instead."
+ "full_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_inputs will only be populated if they are under a configured size threshold."
+ "full_outputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_outputs will only be populated if they are under a configured size threshold."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskExecutionGetRequest:
+
+TaskExecutionGetRequest
+------------------------------------------------------------------
+
+A message used to fetch a single task execution entity.
+See :ref:`ref_flyteidl.admin.TaskExecution` for more details
+
+
+
+.. csv-table:: TaskExecutionGetRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Unique identifier for the task execution. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskExecutionList:
+
+TaskExecutionList
+------------------------------------------------------------------
+
+Response structure for a query to list task execution entities.
+See :ref:`ref_flyteidl.admin.TaskExecution` for more details
+
+
+
+.. csv-table:: TaskExecutionList type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "task_executions", ":ref:`ref_flyteidl.admin.TaskExecution`", "repeated", ""
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.TaskExecutionListRequest:
+
+TaskExecutionListRequest
+------------------------------------------------------------------
+
+Represents a request structure to retrieve a list of task execution entities yielded by a specific node execution.
+See :ref:`ref_flyteidl.admin.TaskExecution` for more details
+
+
+
+.. csv-table:: TaskExecutionListRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Indicates the node execution to filter by. +required"
+ "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required"
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional"
+ "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional"
+ "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering for returned list. +optional"
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/version.proto:
+
+flyteidl/admin/version.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.GetVersionRequest:
+
+GetVersionRequest
+------------------------------------------------------------------
+
+Empty request for GetVersion
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.GetVersionResponse:
+
+GetVersionResponse
+------------------------------------------------------------------
+
+Response for the GetVersion API
+
+
+
+.. csv-table:: GetVersionResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "control_plane_version", ":ref:`ref_flyteidl.admin.Version`", "", "The control plane version information. FlyteAdmin and related components form the control plane of Flyte"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.Version:
+
+Version
+------------------------------------------------------------------
+
+Provides Version information for a component
+
+
+
+.. csv-table:: Version type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "Build", ":ref:`ref_string`", "", "Specifies the GIT sha of the build"
+ "Version", ":ref:`ref_string`", "", "Version for the build, should follow a semver"
+ "BuildTime", ":ref:`ref_string`", "", "Build timestamp"
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/workflow.proto:
+
+flyteidl/admin/workflow.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.Workflow:
+
+Workflow
+------------------------------------------------------------------
+
+Represents the workflow structure stored in the Admin.
+A workflow is created by ordering tasks and associating outputs to inputs
+in order to produce a directed-acyclic execution graph.
+
+
+
+.. csv-table:: Workflow type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow."
+ "closure", ":ref:`ref_flyteidl.admin.WorkflowClosure`", "", "closure encapsulates all the fields that maps to a compiled version of the workflow."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowClosure:
+
+WorkflowClosure
+------------------------------------------------------------------
+
+A container holding the compiled workflow produced from the WorkflowSpec and additional metadata.
+
+
+
+.. csv-table:: WorkflowClosure type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "compiled_workflow", ":ref:`ref_flyteidl.core.CompiledWorkflowClosure`", "", "Represents the compiled representation of the workflow from the specification provided."
+ "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the workflow was created."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowCreateRequest:
+
+WorkflowCreateRequest
+------------------------------------------------------------------
+
+Represents a request structure to create a revision of a workflow.
+See :ref:`ref_flyteidl.admin.Workflow` for more details
+
+
+
+.. csv-table:: WorkflowCreateRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow. +required"
+ "spec", ":ref:`ref_flyteidl.admin.WorkflowSpec`", "", "Represents the specification for workflow. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowCreateResponse:
+
+WorkflowCreateResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be populated in the future.
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowList:
+
+WorkflowList
+------------------------------------------------------------------
+
+Represents a list of workflows returned from the admin.
+See :ref:`ref_flyteidl.admin.Workflow` for more details
+
+
+
+.. csv-table:: WorkflowList type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "workflows", ":ref:`ref_flyteidl.admin.Workflow`", "repeated", "A list of workflows returned based on the request."
+ "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty."
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowSpec:
+
+WorkflowSpec
+------------------------------------------------------------------
+
+Represents a structure that encapsulates the specification of the workflow.
+
+
+
+.. csv-table:: WorkflowSpec type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "template", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "", "Template of the task that encapsulates all the metadata of the workflow."
+ "sub_workflows", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "repeated", "Workflows that are embedded into other workflows need to be passed alongside the parent workflow to the propeller compiler (since the compiler doesn't have any knowledge of other workflows - ie, it doesn't reach out to Admin to see other registered workflows). In fact, subworkflows do not even need to be registered."
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/admin/workflow_attributes.proto:
+
+flyteidl/admin/workflow_attributes.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowAttributes:
+
+WorkflowAttributes
+------------------------------------------------------------------
+
+Defines a set of custom matching attributes which defines resource defaults for a project, domain and workflow.
+For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`
+
+
+
+.. csv-table:: WorkflowAttributes type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", "Unique project id for which this set of attributes will be applied."
+ "domain", ":ref:`ref_string`", "", "Unique domain id for which this set of attributes will be applied."
+ "workflow", ":ref:`ref_string`", "", "Workflow name for which this set of attributes will be applied."
+ "matching_attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowAttributesDeleteRequest:
+
+WorkflowAttributesDeleteRequest
+------------------------------------------------------------------
+
+Request to delete a set of matchable workflow attribute overrides.
+For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`
+
+
+
+.. csv-table:: WorkflowAttributesDeleteRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required"
+ "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required"
+ "workflow", ":ref:`ref_string`", "", "Workflow name which this set of attributes references. +required"
+ "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to delete. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowAttributesDeleteResponse:
+
+WorkflowAttributesDeleteResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be populated in the future.
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowAttributesGetRequest:
+
+WorkflowAttributesGetRequest
+------------------------------------------------------------------
+
+Request to get an individual workflow attribute override.
+For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`
+
+
+
+.. csv-table:: WorkflowAttributesGetRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required"
+ "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required"
+ "workflow", ":ref:`ref_string`", "", "Workflow name which this set of attributes references. +required"
+ "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to return. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowAttributesGetResponse:
+
+WorkflowAttributesGetResponse
+------------------------------------------------------------------
+
+Response to get an individual workflow attribute override.
+
+
+
+.. csv-table:: WorkflowAttributesGetResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "attributes", ":ref:`ref_flyteidl.admin.WorkflowAttributes`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowAttributesUpdateRequest:
+
+WorkflowAttributesUpdateRequest
+------------------------------------------------------------------
+
+Sets custom attributes for a project, domain and workflow combination.
+For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`
+
+
+
+.. csv-table:: WorkflowAttributesUpdateRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "attributes", ":ref:`ref_flyteidl.admin.WorkflowAttributes`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.admin.WorkflowAttributesUpdateResponse:
+
+WorkflowAttributesUpdateResponse
+------------------------------------------------------------------
+
+Purposefully empty, may be populated in the future.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_google/protobuf/duration.proto:
+
+google/protobuf/duration.proto
+==================================================================
+
+
+
+
+
+.. _ref_google.protobuf.Duration:
+
+Duration
+------------------------------------------------------------------
+
+A Duration represents a signed, fixed-length span of time represented
+as a count of seconds and fractions of seconds at nanosecond
+resolution. It is independent of any calendar and concepts like "day"
+or "month". It is related to Timestamp in that the difference between
+two Timestamp values is a Duration and it can be added or subtracted
+from a Timestamp. Range is approximately +-10,000 years.
+
+# Examples
+
+Example 1: Compute Duration from two Timestamps in pseudo code.
+
+ Timestamp start = ...;
+ Timestamp end = ...;
+ Duration duration = ...;
+
+ duration.seconds = end.seconds - start.seconds;
+ duration.nanos = end.nanos - start.nanos;
+
+ if (duration.seconds < 0 && duration.nanos > 0) {
+ duration.seconds += 1;
+ duration.nanos -= 1000000000;
+ } else if (duration.seconds > 0 && duration.nanos < 0) {
+ duration.seconds -= 1;
+ duration.nanos += 1000000000;
+ }
+
+Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+
+ Timestamp start = ...;
+ Duration duration = ...;
+ Timestamp end = ...;
+
+ end.seconds = start.seconds + duration.seconds;
+ end.nanos = start.nanos + duration.nanos;
+
+ if (end.nanos < 0) {
+ end.seconds -= 1;
+ end.nanos += 1000000000;
+ } else if (end.nanos >= 1000000000) {
+ end.seconds += 1;
+ end.nanos -= 1000000000;
+ }
+
+Example 3: Compute Duration from datetime.timedelta in Python.
+
+ td = datetime.timedelta(days=3, minutes=10)
+ duration = Duration()
+ duration.FromTimedelta(td)
+
+# JSON Mapping
+
+In JSON format, the Duration type is encoded as a string rather than an
+object, where the string ends in the suffix "s" (indicating seconds) and
+is preceded by the number of seconds, with nanoseconds expressed as
+fractional seconds. For example, 3 seconds with 0 nanoseconds should be
+encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
+be expressed in JSON format as "3.000000001s", and 3 seconds and 1
+microsecond should be expressed in JSON format as "3.000001s".
+
+
+
+.. csv-table:: Duration type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years"
+ "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive."
+
+
+
+
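+The JSON mapping described above can be exercised directly with the standard protobuf Python
+runtime, for example:
+
+.. code-block:: python
+
+   from google.protobuf import duration_pb2, json_format
+
+   d = duration_pb2.Duration(seconds=3, nanos=1)
+   print(json_format.MessageToJson(d))  # prints "3.000000001s"
+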
+
+
+
+
+
+
+
+
+
+
diff --git a/flyteidl/protos/docs/core/core.rst b/flyteidl/protos/docs/core/core.rst
new file mode 100644
index 00000000000..31e8d3f3cc3
--- /dev/null
+++ b/flyteidl/protos/docs/core/core.rst
@@ -0,0 +1,3656 @@
+######################
+Protocol Documentation
+######################
+
+
+
+
+.. _ref_flyteidl/core/catalog.proto:
+
+flyteidl/core/catalog.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.CatalogArtifactTag:
+
+CatalogArtifactTag
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: CatalogArtifactTag type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "artifact_id", ":ref:`ref_string`", "", "Artifact ID is generated name"
+ "name", ":ref:`ref_string`", "", "Flyte computes the tag automatically, as the hash of the values"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.CatalogMetadata:
+
+CatalogMetadata
+------------------------------------------------------------------
+
+Catalog artifact information with specific metadata
+
+
+
+.. csv-table:: CatalogMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "dataset_id", ":ref:`ref_flyteidl.core.Identifier`", "", "Dataset ID in the catalog"
+ "artifact_tag", ":ref:`ref_flyteidl.core.CatalogArtifactTag`", "", "Artifact tag in the catalog"
+ "source_task_execution", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Today we only support TaskExecutionIdentifier as a source, as catalog caching only works for task executions"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.CatalogReservation:
+
+CatalogReservation
+------------------------------------------------------------------
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.CatalogCacheStatus:
+
+CatalogCacheStatus
+------------------------------------------------------------------
+
+Indicates the status of CatalogCaching. This is not embedded in TaskNodeMetadata because we may use it for other types of nodes as well in the future.
+
+.. csv-table:: Enum CatalogCacheStatus values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "CACHE_DISABLED", "0", "Used to indicate that caching was disabled"
+ "CACHE_MISS", "1", "Used to indicate that the cache lookup resulted in no matches"
+ "CACHE_HIT", "2", "used to indicate that the associated artifact was a result of a previous execution"
+ "CACHE_POPULATED", "3", "used to indicate that the resultant artifact was added to the cache"
+ "CACHE_LOOKUP_FAILURE", "4", "Used to indicate that cache lookup failed because of an error"
+ "CACHE_PUT_FAILURE", "5", "Used to indicate that cache lookup failed because of an error"
+
+
+
+.. _ref_flyteidl.core.CatalogReservation.Status:
+
+CatalogReservation.Status
+------------------------------------------------------------------
+
+Indicates the status of a catalog reservation operation.
+
+.. csv-table:: Enum CatalogReservation.Status values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "RESERVATION_DISABLED", "0", "Used to indicate that reservations are disabled"
+ "RESERVATION_ACQUIRED", "1", "Used to indicate that a reservation was successfully acquired or extended"
+ "RESERVATION_EXISTS", "2", "Used to indicate that an active reservation currently exists"
+ "RESERVATION_RELEASED", "3", "Used to indicate that the reservation has been successfully released"
+ "RESERVATION_FAILURE", "4", "Used to indicate that a reservation operation resulted in failure"
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/core/compiler.proto:
+
+flyteidl/core/compiler.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.CompiledTask:
+
+CompiledTask
+------------------------------------------------------------------
+
+Output of the compilation step. This object represents one task. We store more metadata at this layer.
+
+
+
+.. csv-table:: CompiledTask type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "template", ":ref:`ref_flyteidl.core.TaskTemplate`", "", "Completely contained TaskTemplate"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.CompiledWorkflow:
+
+CompiledWorkflow
+------------------------------------------------------------------
+
+Output of the compilation Step. This object represents one workflow. We store more metadata at this layer
+
+
+
+.. csv-table:: CompiledWorkflow type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "template", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "", "Completely contained Workflow Template"
+ "connections", ":ref:`ref_flyteidl.core.ConnectionSet`", "", "For internal use only! This field is used by the system and must not be filled in. Any values set will be ignored."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.CompiledWorkflowClosure:
+
+CompiledWorkflowClosure
+------------------------------------------------------------------
+
+A Compiled Workflow Closure contains all the information required to start a new execution, or to visualize a workflow
+and its details. The CompiledWorkflowClosure should always contain a primary workflow, that is, the main workflow that
+will begin the execution. All subworkflows are denormalized. WorkflowNodes refer to the workflow identifiers of
+compiled subworkflows.
+
+
+
+.. csv-table:: CompiledWorkflowClosure type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "primary", ":ref:`ref_flyteidl.core.CompiledWorkflow`", "", "+required"
+ "sub_workflows", ":ref:`ref_flyteidl.core.CompiledWorkflow`", "repeated", "Guaranteed that there will only exist one and only one workflow with a given id, i.e., every sub workflow has a unique identifier. Also every enclosed subworkflow is used either by a primary workflow or by a subworkflow as an inlined workflow +optional"
+ "tasks", ":ref:`ref_flyteidl.core.CompiledTask`", "repeated", "Guaranteed that there will only exist one and only one task with a given id, i.e., every task has a unique id +required (at least 1)"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ConnectionSet:
+
+ConnectionSet
+------------------------------------------------------------------
+
+Adjacency list for the workflow. This is created as part of the compilation process. Every process after the compilation
+step uses this created ConnectionSet
+
+
+
+.. csv-table:: ConnectionSet type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "downstream", ":ref:`ref_flyteidl.core.ConnectionSet.DownstreamEntry`", "repeated", "A list of all the node ids that are downstream from a given node id"
+ "upstream", ":ref:`ref_flyteidl.core.ConnectionSet.UpstreamEntry`", "repeated", "A list of all the node ids, that are upstream of this node id"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ConnectionSet.DownstreamEntry:
+
+ConnectionSet.DownstreamEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: ConnectionSet.DownstreamEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_flyteidl.core.ConnectionSet.IdList`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ConnectionSet.IdList:
+
+ConnectionSet.IdList
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: ConnectionSet.IdList type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "ids", ":ref:`ref_string`", "repeated", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ConnectionSet.UpstreamEntry:
+
+ConnectionSet.UpstreamEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: ConnectionSet.UpstreamEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_flyteidl.core.ConnectionSet.IdList`", "", ""
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/core/condition.proto:
+
+flyteidl/core/condition.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.BooleanExpression:
+
+BooleanExpression
+------------------------------------------------------------------
+
+Defines a boolean expression tree. It can be a simple or a conjunction expression.
+Multiple expressions can be combined using a conjunction or a disjunction to result in a final boolean result.
+
+
+
+.. csv-table:: BooleanExpression type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "conjunction", ":ref:`ref_flyteidl.core.ConjunctionExpression`", "", ""
+ "comparison", ":ref:`ref_flyteidl.core.ComparisonExpression`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ComparisonExpression:
+
+ComparisonExpression
+------------------------------------------------------------------
+
+Defines a 2-level tree where the root is a comparison operator and Operands are primitives or known variables.
+Each expression results in a boolean result.
+
+
+
+.. csv-table:: ComparisonExpression type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "operator", ":ref:`ref_flyteidl.core.ComparisonExpression.Operator`", "", ""
+ "left_value", ":ref:`ref_flyteidl.core.Operand`", "", ""
+ "right_value", ":ref:`ref_flyteidl.core.Operand`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ConjunctionExpression:
+
+ConjunctionExpression
+------------------------------------------------------------------
+
+Defines a conjunction expression of two boolean expressions.
+
+
+
+.. csv-table:: ConjunctionExpression type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "operator", ":ref:`ref_flyteidl.core.ConjunctionExpression.LogicalOperator`", "", ""
+ "left_expression", ":ref:`ref_flyteidl.core.BooleanExpression`", "", ""
+ "right_expression", ":ref:`ref_flyteidl.core.BooleanExpression`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Operand:
+
+Operand
+------------------------------------------------------------------
+
+Defines an operand to a comparison expression.
+
+
+
+.. csv-table:: Operand type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "primitive", ":ref:`ref_flyteidl.core.Primitive`", "", "Can be a constant"
+ "var", ":ref:`ref_string`", "", "Or one of this node's input variables"
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ComparisonExpression.Operator:
+
+ComparisonExpression.Operator
+------------------------------------------------------------------
+
+Binary Operator for each expression
+
+.. csv-table:: Enum ComparisonExpression.Operator values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "EQ", "0", ""
+ "NEQ", "1", ""
+ "GT", "2", "Greater Than"
+ "GTE", "3", ""
+ "LT", "4", "Less Than"
+ "LTE", "5", ""
+
+
+
+.. _ref_flyteidl.core.ConjunctionExpression.LogicalOperator:
+
+ConjunctionExpression.LogicalOperator
+------------------------------------------------------------------
+
+Nested conditions. They can be conjoined using AND / OR.
+Order of evaluation does not matter, as the operators are commutative.
+
+.. csv-table:: Enum ConjunctionExpression.LogicalOperator values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "AND", "0", "Conjunction"
+ "OR", "1", ""
+
+
+
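+
+As an illustrative sketch (not part of the generated reference), the boolean expression
+``x > 5 AND y == 10`` could be assembled from these messages, assuming the generated Python
+bindings are importable as ``flyteidl.core.condition_pb2`` and ``flyteidl.core.literals_pb2``:
+
+.. code-block:: python
+
+ from flyteidl.core import condition_pb2, literals_pb2
+
+ # x > 5 : compare the node input variable "x" against a constant primitive
+ x_gt_5 = condition_pb2.ComparisonExpression(
+     operator=condition_pb2.ComparisonExpression.GT,
+     left_value=condition_pb2.Operand(var="x"),
+     right_value=condition_pb2.Operand(primitive=literals_pb2.Primitive(integer=5)),
+ )
+
+ # y == 10
+ y_eq_10 = condition_pb2.ComparisonExpression(
+     operator=condition_pb2.ComparisonExpression.EQ,
+     left_value=condition_pb2.Operand(var="y"),
+     right_value=condition_pb2.Operand(primitive=literals_pb2.Primitive(integer=10)),
+ )
+
+ # AND the two comparisons; evaluation order does not matter since AND is commutative.
+ expr = condition_pb2.BooleanExpression(
+     conjunction=condition_pb2.ConjunctionExpression(
+         operator=condition_pb2.ConjunctionExpression.AND,
+         left_expression=condition_pb2.BooleanExpression(comparison=x_gt_5),
+         right_expression=condition_pb2.BooleanExpression(comparison=y_eq_10),
+     )
+ )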
+
+
+
+
+
+
+
+.. _ref_flyteidl/core/dynamic_job.proto:
+
+flyteidl/core/dynamic_job.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.DynamicJobSpec:
+
+DynamicJobSpec
+------------------------------------------------------------------
+
+Describes a set of tasks to execute and how the final outputs are produced.
+
+
+
+.. csv-table:: DynamicJobSpec type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "nodes", ":ref:`ref_flyteidl.core.Node`", "repeated", "A collection of nodes to execute."
+ "min_successes", ":ref:`ref_int64`", "", "An absolute number of successful completions of nodes required to mark this job as succeeded. As soon as this criteria is met, the dynamic job will be marked as successful and outputs will be computed. If this number becomes impossible to reach (e.g. number of currently running tasks + number of already succeeded tasks < min_successes) the task will be aborted immediately and marked as failed. The default value of this field, if not specified, is the count of nodes repeated field."
+ "outputs", ":ref:`ref_flyteidl.core.Binding`", "repeated", "Describes how to bind the final output of the dynamic job from the outputs of executed nodes. The referenced ids in bindings should have the generated id for the subtask."
+ "tasks", ":ref:`ref_flyteidl.core.TaskTemplate`", "repeated", "[Optional] A complete list of task specs referenced in nodes."
+ "subworkflows", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "repeated", "[Optional] A complete list of task specs referenced in nodes."
+
+
+
+
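+
+The abort criterion described for ``min_successes`` can be sketched as follows; the helper
+below is hypothetical and only restates the rule, it is not part of flyteidl:
+
+.. code-block:: python
+
+ def dynamic_job_is_unreachable(running: int, succeeded: int, min_successes: int) -> bool:
+     """Return True when the dynamic job can no longer reach min_successes.
+
+     Even if every currently running node succeeds, the achievable number of
+     successes is capped at running + succeeded; if that is still below
+     min_successes the job is aborted and marked as failed.
+     """
+     return running + succeeded < min_successes
+
+
+ # With 10 nodes and the default min_successes (the number of nodes),
+ # 2 running + 7 succeeded can reach at most 9 successes, so the job aborts.
+ assert dynamic_job_is_unreachable(running=2, succeeded=7, min_successes=10)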
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/core/errors.proto:
+
+flyteidl/core/errors.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.ContainerError:
+
+ContainerError
+------------------------------------------------------------------
+
+Error message to propagate detailed errors from container executions to the execution
+engine.
+
+
+
+.. csv-table:: ContainerError type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "code", ":ref:`ref_string`", "", "A simplified code for errors, so that we can provide a glossary of all possible errors."
+ "message", ":ref:`ref_string`", "", "A detailed error message."
+ "kind", ":ref:`ref_flyteidl.core.ContainerError.Kind`", "", "An abstract error kind for this error. Defaults to Non_Recoverable if not specified."
+ "origin", ":ref:`ref_flyteidl.core.ExecutionError.ErrorKind`", "", "Defines the origin of the error (system, user, unknown)."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ErrorDocument:
+
+ErrorDocument
+------------------------------------------------------------------
+
+Defines the errors.pb file format the container can produce to communicate
+failure reasons to the execution engine.
+
+
+
+.. csv-table:: ErrorDocument type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "error", ":ref:`ref_flyteidl.core.ContainerError`", "", "The error raised during execution."
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ContainerError.Kind:
+
+ContainerError.Kind
+------------------------------------------------------------------
+
+Defines a generic error type that dictates the behavior of the retry strategy.
+
+.. csv-table:: Enum ContainerError.Kind values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "NON_RECOVERABLE", "0", ""
+ "RECOVERABLE", "1", ""
+
+
+
+
+
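+
+As a sketch of how a container could produce this file, assuming the generated Python bindings
+are importable as ``flyteidl.core.errors_pb2`` and ``flyteidl.core.execution_pb2``:
+
+.. code-block:: python
+
+ from flyteidl.core import errors_pb2, execution_pb2
+
+ doc = errors_pb2.ErrorDocument(
+     error=errors_pb2.ContainerError(
+         code="UserTaskError",                        # simplified, glossary-style code
+         message="division by zero in step 3",        # detailed message, may include a stack trace
+         kind=errors_pb2.ContainerError.RECOVERABLE,  # lets the retry strategy re-run the task
+         origin=execution_pb2.ExecutionError.USER,    # the error originated in user code
+     )
+ )
+
+ # The execution engine reads the serialized message from the agreed-upon output
+ # location, conventionally as a file named errors.pb.
+ with open("errors.pb", "wb") as f:
+     f.write(doc.SerializeToString())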
+
+
+
+
+
+.. _ref_flyteidl/core/execution.proto:
+
+flyteidl/core/execution.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.ExecutionError:
+
+ExecutionError
+------------------------------------------------------------------
+
+Represents the error message from the execution.
+
+
+
+.. csv-table:: ExecutionError type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "code", ":ref:`ref_string`", "", "Error code indicates a grouping of a type of error. More Info: <Link>"
+ "message", ":ref:`ref_string`", "", "Detailed description of the error - including stack trace."
+ "error_uri", ":ref:`ref_string`", "", "Full error contents accessible via a URI"
+ "kind", ":ref:`ref_flyteidl.core.ExecutionError.ErrorKind`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.NodeExecution:
+
+NodeExecution
+------------------------------------------------------------------
+
+Indicates various phases of Node Execution
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.QualityOfService:
+
+QualityOfService
+------------------------------------------------------------------
+
+Indicates the priority of an execution.
+
+
+
+.. csv-table:: QualityOfService type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "tier", ":ref:`ref_flyteidl.core.QualityOfService.Tier`", "", ""
+ "spec", ":ref:`ref_flyteidl.core.QualityOfServiceSpec`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.QualityOfServiceSpec:
+
+QualityOfServiceSpec
+------------------------------------------------------------------
+
+Represents customized execution run-time attributes.
+
+
+
+.. csv-table:: QualityOfServiceSpec type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "queueing_budget", ":ref:`ref_google.protobuf.Duration`", "", "Indicates how much queueing delay an execution can tolerate."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.TaskExecution:
+
+TaskExecution
+------------------------------------------------------------------
+
+Phases that task plugins can go through. Not all phases may be applicable to a specific plugin task,
+but this is the cumulative list that customers may want to know about for their task.
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.TaskLog:
+
+TaskLog
+------------------------------------------------------------------
+
+Log information for the task that is specific to a log sink
+When our log story is flushed out, we may have more metadata here like log link expiry
+
+
+
+.. csv-table:: TaskLog type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "uri", ":ref:`ref_string`", "", ""
+ "name", ":ref:`ref_string`", "", ""
+ "message_format", ":ref:`ref_flyteidl.core.TaskLog.MessageFormat`", "", ""
+ "ttl", ":ref:`ref_google.protobuf.Duration`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.WorkflowExecution:
+
+WorkflowExecution
+------------------------------------------------------------------
+
+Indicates various phases of Workflow Execution
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ExecutionError.ErrorKind:
+
+ExecutionError.ErrorKind
+------------------------------------------------------------------
+
+Error type: System or User
+
+.. csv-table:: Enum ExecutionError.ErrorKind values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "UNKNOWN", "0", ""
+ "USER", "1", ""
+ "SYSTEM", "2", ""
+
+
+
+.. _ref_flyteidl.core.NodeExecution.Phase:
+
+NodeExecution.Phase
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum NodeExecution.Phase values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "UNDEFINED", "0", ""
+ "QUEUED", "1", ""
+ "RUNNING", "2", ""
+ "SUCCEEDED", "3", ""
+ "FAILING", "4", ""
+ "FAILED", "5", ""
+ "ABORTED", "6", ""
+ "SKIPPED", "7", ""
+ "TIMED_OUT", "8", ""
+ "DYNAMIC_RUNNING", "9", ""
+ "RECOVERED", "10", ""
+
+
+
+.. _ref_flyteidl.core.QualityOfService.Tier:
+
+QualityOfService.Tier
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum QualityOfService.Tier values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "UNDEFINED", "0", "Default: no quality of service specified."
+ "HIGH", "1", ""
+ "MEDIUM", "2", ""
+ "LOW", "3", ""
+
+
+
+.. _ref_flyteidl.core.TaskExecution.Phase:
+
+TaskExecution.Phase
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum TaskExecution.Phase values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "UNDEFINED", "0", ""
+ "QUEUED", "1", ""
+ "RUNNING", "2", ""
+ "SUCCEEDED", "3", ""
+ "ABORTED", "4", ""
+ "FAILED", "5", ""
+ "INITIALIZING", "6", "To indicate cases where task is initializing, like: ErrImagePull, ContainerCreating, PodInitializing"
+ "WAITING_FOR_RESOURCES", "7", "To address cases, where underlying resource is not available: Backoff error, Resource quota exceeded"
+
+
+
+.. _ref_flyteidl.core.TaskLog.MessageFormat:
+
+TaskLog.MessageFormat
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum TaskLog.MessageFormat values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "UNKNOWN", "0", ""
+ "CSV", "1", ""
+ "JSON", "2", ""
+
+
+
+.. _ref_flyteidl.core.WorkflowExecution.Phase:
+
+WorkflowExecution.Phase
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum WorkflowExecution.Phase values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "UNDEFINED", "0", ""
+ "QUEUED", "1", ""
+ "RUNNING", "2", ""
+ "SUCCEEDING", "3", ""
+ "SUCCEEDED", "4", ""
+ "FAILING", "5", ""
+ "FAILED", "6", ""
+ "ABORTED", "7", ""
+ "TIMED_OUT", "8", ""
+ "ABORTING", "9", ""
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/core/identifier.proto:
+
+flyteidl/core/identifier.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.Identifier:
+
+Identifier
+------------------------------------------------------------------
+
+Encapsulation of fields that uniquely identifies a Flyte resource.
+
+
+
+.. csv-table:: Identifier type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Identifies the specific type of resource that this identifier corresponds to."
+ "project", ":ref:`ref_string`", "", "Name of the project the resource belongs to."
+ "domain", ":ref:`ref_string`", "", "Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project."
+ "name", ":ref:`ref_string`", "", "User provided value for the resource."
+ "version", ":ref:`ref_string`", "", "Specific version of the resource."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.NodeExecutionIdentifier:
+
+NodeExecutionIdentifier
+------------------------------------------------------------------
+
+Encapsulation of fields that identify a Flyte node execution entity.
+
+
+
+.. csv-table:: NodeExecutionIdentifier type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "node_id", ":ref:`ref_string`", "", ""
+ "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.TaskExecutionIdentifier:
+
+TaskExecutionIdentifier
+------------------------------------------------------------------
+
+Encapsulation of fields that identify a Flyte task execution entity.
+
+
+
+.. csv-table:: TaskExecutionIdentifier type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "task_id", ":ref:`ref_flyteidl.core.Identifier`", "", ""
+ "node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", ""
+ "retry_attempt", ":ref:`ref_uint32`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.WorkflowExecutionIdentifier:
+
+WorkflowExecutionIdentifier
+------------------------------------------------------------------
+
+Encapsulation of fields that uniquely identifies a Flyte workflow execution
+
+
+
+.. csv-table:: WorkflowExecutionIdentifier type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", "Name of the project the resource belongs to."
+ "domain", ":ref:`ref_string`", "", "Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project."
+ "name", ":ref:`ref_string`", "", "User or system provided value for the resource."
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ResourceType:
+
+ResourceType
+------------------------------------------------------------------
+
+Indicates a resource type within Flyte.
+
+.. csv-table:: Enum ResourceType values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "UNSPECIFIED", "0", ""
+ "TASK", "1", ""
+ "WORKFLOW", "2", ""
+ "LAUNCH_PLAN", "3", ""
+ "DATASET", "4", "A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects"
+
+
+
+
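+
+Putting these identifier messages together, a sketch of addressing the second retry attempt of
+a task inside a workflow execution might look like this (assuming the generated Python bindings
+are importable as ``flyteidl.core.identifier_pb2``; the project, domain and names are placeholders):
+
+.. code-block:: python
+
+ from flyteidl.core import identifier_pb2
+
+ task_id = identifier_pb2.Identifier(
+     resource_type=identifier_pb2.TASK,
+     project="flytesnacks",
+     domain="development",
+     name="my_task",
+     version="v1",
+ )
+
+ workflow_execution = identifier_pb2.WorkflowExecutionIdentifier(
+     project="flytesnacks",
+     domain="development",
+     name="f8a2b1c",  # hypothetical system-generated execution name
+ )
+
+ task_execution = identifier_pb2.TaskExecutionIdentifier(
+     task_id=task_id,
+     node_execution_id=identifier_pb2.NodeExecutionIdentifier(
+         node_id="n0",
+         execution_id=workflow_execution,
+     ),
+     retry_attempt=1,  # zero-based, so this is the second attempt
+ )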
+
+
+
+
+
+
+.. _ref_flyteidl/core/interface.proto:
+
+flyteidl/core/interface.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.Parameter:
+
+Parameter
+------------------------------------------------------------------
+
+A parameter is used as input to a launch plan and has
+the special ability to have a default value or mark itself as required.
+
+
+
+.. csv-table:: Parameter type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "var", ":ref:`ref_flyteidl.core.Variable`", "", "+required Variable. Defines the type of the variable backing this parameter."
+ "default", ":ref:`ref_flyteidl.core.Literal`", "", "Defines a default value that has to match the variable type defined."
+ "required", ":ref:`ref_bool`", "", "+optional, is this value required to be filled."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ParameterMap:
+
+ParameterMap
+------------------------------------------------------------------
+
+A map of Parameters.
+
+
+
+.. csv-table:: ParameterMap type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "parameters", ":ref:`ref_flyteidl.core.ParameterMap.ParametersEntry`", "repeated", "Defines a map of parameter names to parameters."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ParameterMap.ParametersEntry:
+
+ParameterMap.ParametersEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: ParameterMap.ParametersEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_flyteidl.core.Parameter`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.TypedInterface:
+
+TypedInterface
+------------------------------------------------------------------
+
+Defines strongly typed inputs and outputs.
+
+
+
+.. csv-table:: TypedInterface type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "inputs", ":ref:`ref_flyteidl.core.VariableMap`", "", ""
+ "outputs", ":ref:`ref_flyteidl.core.VariableMap`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Variable:
+
+Variable
+------------------------------------------------------------------
+
+Defines a strongly typed variable.
+
+
+
+.. csv-table:: Variable type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "type", ":ref:`ref_flyteidl.core.LiteralType`", "", "Variable literal type."
+ "description", ":ref:`ref_string`", "", "+optional string describing input variable"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.VariableMap:
+
+VariableMap
+------------------------------------------------------------------
+
+A map of Variables
+
+
+
+.. csv-table:: VariableMap type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "variables", ":ref:`ref_flyteidl.core.VariableMap.VariablesEntry`", "repeated", "Defines a map of variable names to variables."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.VariableMap.VariablesEntry:
+
+VariableMap.VariablesEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: VariableMap.VariablesEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_flyteidl.core.Variable`", "", ""
+
+
+
+
+
+
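+
+A sketch of a typed interface with one integer input, plus a parameter map that gives that input
+a default of 7 (assuming ``flyteidl.core.interface_pb2``, ``flyteidl.core.types_pb2`` and
+``flyteidl.core.literals_pb2`` are importable from the generated Python bindings):
+
+.. code-block:: python
+
+ from flyteidl.core import interface_pb2, literals_pb2, types_pb2
+
+ int_type = types_pb2.LiteralType(simple=types_pb2.INTEGER)
+
+ interface = interface_pb2.TypedInterface(
+     inputs=interface_pb2.VariableMap(
+         variables={"x": interface_pb2.Variable(type=int_type, description="an integer input")}
+     ),
+     outputs=interface_pb2.VariableMap(),
+ )
+
+ # A Parameter wraps a Variable and may carry a default matching the variable type.
+ params = interface_pb2.ParameterMap(
+     parameters={
+         "x": interface_pb2.Parameter(
+             var=interface_pb2.Variable(type=int_type),
+             default=literals_pb2.Literal(
+                 scalar=literals_pb2.Scalar(primitive=literals_pb2.Primitive(integer=7))
+             ),
+         )
+     }
+ )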
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/core/literals.proto:
+
+flyteidl/core/literals.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.Binary:
+
+Binary
+------------------------------------------------------------------
+
+A simple byte array with a tag to help different parts of the system communicate about what is in the byte array.
+It's strongly advisable that consumers of this type define a unique tag and validate the tag before parsing the data.
+
+
+
+.. csv-table:: Binary type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "value", ":ref:`ref_bytes`", "", ""
+ "tag", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Binding:
+
+Binding
+------------------------------------------------------------------
+
+An input/output binding of a variable to either static value or a node output.
+
+
+
+.. csv-table:: Binding type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "var", ":ref:`ref_string`", "", "Variable name must match an input/output variable of the node."
+ "binding", ":ref:`ref_flyteidl.core.BindingData`", "", "Data to use to bind this variable."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.BindingData:
+
+BindingData
+------------------------------------------------------------------
+
+Specifies either a simple value or a reference to another output.
+
+
+
+.. csv-table:: BindingData type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "scalar", ":ref:`ref_flyteidl.core.Scalar`", "", "A simple scalar value."
+ "collection", ":ref:`ref_flyteidl.core.BindingDataCollection`", "", "A collection of binding data. This allows nesting of binding data to any number of levels."
+ "promise", ":ref:`ref_flyteidl.core.OutputReference`", "", "References an output promised by another node."
+ "map", ":ref:`ref_flyteidl.core.BindingDataMap`", "", "A map of bindings. The key is always a string."
+ "union", ":ref:`ref_flyteidl.core.UnionInfo`", "", ""
+
+
+
+
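+
+A sketch of binding one input to a static scalar and another to an output promised by an
+upstream node (assuming ``flyteidl.core.literals_pb2`` and ``flyteidl.core.types_pb2``, where
+``OutputReference`` is defined):
+
+.. code-block:: python
+
+ from flyteidl.core import literals_pb2, types_pb2
+
+ # Bind the input "threshold" to a static scalar value.
+ static_binding = literals_pb2.Binding(
+     var="threshold",
+     binding=literals_pb2.BindingData(
+         scalar=literals_pb2.Scalar(primitive=literals_pb2.Primitive(integer=5))
+     ),
+ )
+
+ # Bind the input "data" to the output "out" promised by the upstream node "n0".
+ promise_binding = literals_pb2.Binding(
+     var="data",
+     binding=literals_pb2.BindingData(
+         promise=types_pb2.OutputReference(node_id="n0", var="out")
+     ),
+ )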
+
+
+
+.. _ref_flyteidl.core.BindingDataCollection:
+
+BindingDataCollection
+------------------------------------------------------------------
+
+A collection of BindingData items.
+
+
+
+.. csv-table:: BindingDataCollection type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "bindings", ":ref:`ref_flyteidl.core.BindingData`", "repeated", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.BindingDataMap:
+
+BindingDataMap
+------------------------------------------------------------------
+
+A map of BindingData items.
+
+
+
+.. csv-table:: BindingDataMap type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "bindings", ":ref:`ref_flyteidl.core.BindingDataMap.BindingsEntry`", "repeated", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.BindingDataMap.BindingsEntry:
+
+BindingDataMap.BindingsEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: BindingDataMap.BindingsEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_flyteidl.core.BindingData`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Blob:
+
+Blob
+------------------------------------------------------------------
+
+Refers to an offloaded set of files. It encapsulates the type of the store and a unique uri for where the data is.
+There are no restrictions on how the uri is formatted since it will depend on how to interact with the store.
+
+
+
+.. csv-table:: Blob type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "metadata", ":ref:`ref_flyteidl.core.BlobMetadata`", "", ""
+ "uri", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.BlobMetadata:
+
+BlobMetadata
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: BlobMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "type", ":ref:`ref_flyteidl.core.BlobType`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.KeyValuePair:
+
+KeyValuePair
+------------------------------------------------------------------
+
+A generic key value pair.
+
+
+
+.. csv-table:: KeyValuePair type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", "required."
+ "value", ":ref:`ref_string`", "", "+optional."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Literal:
+
+Literal
+------------------------------------------------------------------
+
+A simple value. This supports any level of nesting (e.g. array of array of array of Blobs) as well as simple primitives.
+
+
+
+.. csv-table:: Literal type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "scalar", ":ref:`ref_flyteidl.core.Scalar`", "", "A simple value."
+ "collection", ":ref:`ref_flyteidl.core.LiteralCollection`", "", "A collection of literals to allow nesting."
+ "map", ":ref:`ref_flyteidl.core.LiteralMap`", "", "A map of strings to literals."
+ "hash", ":ref:`ref_string`", "", "A hash representing this literal. This is used for caching purposes. For more details refer to RFC 1893 (https://github.com/flyteorg/flyte/blob/master/rfc/system/1893-caching-of-offloaded-objects.md)"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.LiteralCollection:
+
+LiteralCollection
+------------------------------------------------------------------
+
+A collection of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field.
+
+
+
+.. csv-table:: LiteralCollection type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "literals", ":ref:`ref_flyteidl.core.Literal`", "repeated", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.LiteralMap:
+
+LiteralMap
+------------------------------------------------------------------
+
+A map of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field.
+
+
+
+.. csv-table:: LiteralMap type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "literals", ":ref:`ref_flyteidl.core.LiteralMap.LiteralsEntry`", "repeated", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.LiteralMap.LiteralsEntry:
+
+LiteralMap.LiteralsEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: LiteralMap.LiteralsEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_flyteidl.core.Literal`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Primitive:
+
+Primitive
+------------------------------------------------------------------
+
+Primitive Types
+
+
+
+.. csv-table:: Primitive type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "integer", ":ref:`ref_int64`", "", ""
+ "float_value", ":ref:`ref_double`", "", ""
+ "string_value", ":ref:`ref_string`", "", ""
+ "boolean", ":ref:`ref_bool`", "", ""
+ "datetime", ":ref:`ref_google.protobuf.Timestamp`", "", ""
+ "duration", ":ref:`ref_google.protobuf.Duration`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.RetryStrategy:
+
+RetryStrategy
+------------------------------------------------------------------
+
+Retry strategy associated with an executable unit.
+
+
+
+.. csv-table:: RetryStrategy type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "retries", ":ref:`ref_uint32`", "", "Number of retries. Retries will be consumed when the job fails with a recoverable error. The number of retries must be less than or equals to 10."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Scalar:
+
+Scalar
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: Scalar type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "primitive", ":ref:`ref_flyteidl.core.Primitive`", "", ""
+ "blob", ":ref:`ref_flyteidl.core.Blob`", "", ""
+ "binary", ":ref:`ref_flyteidl.core.Binary`", "", ""
+ "schema", ":ref:`ref_flyteidl.core.Schema`", "", ""
+ "none_type", ":ref:`ref_flyteidl.core.Void`", "", ""
+ "error", ":ref:`ref_flyteidl.core.Error`", "", ""
+ "generic", ":ref:`ref_google.protobuf.Struct`", "", ""
+ "structured_dataset", ":ref:`ref_flyteidl.core.StructuredDataset`", "", ""
+ "union", ":ref:`ref_flyteidl.core.Union`", "", ""
+
+
+
+
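+
+These literal messages nest freely. A sketch of a LiteralMap that holds a primitive and a
+collection of primitives (assuming the generated Python bindings ``flyteidl.core.literals_pb2``):
+
+.. code-block:: python
+
+ from flyteidl.core import literals_pb2
+
+
+ def int_literal(value: int) -> literals_pb2.Literal:
+     # Wrap a plain integer in the Scalar -> Primitive hierarchy.
+     return literals_pb2.Literal(
+         scalar=literals_pb2.Scalar(primitive=literals_pb2.Primitive(integer=value))
+     )
+
+
+ nested = literals_pb2.LiteralMap(
+     literals={
+         "answer": int_literal(42),
+         "fibs": literals_pb2.Literal(
+             collection=literals_pb2.LiteralCollection(
+                 literals=[int_literal(n) for n in (1, 1, 2, 3, 5)]
+             )
+         ),
+     }
+ )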
+
+
+
+.. _ref_flyteidl.core.Schema:
+
+Schema
+------------------------------------------------------------------
+
+A strongly typed schema that defines the interface of data retrieved from the underlying storage medium.
+
+
+
+.. csv-table:: Schema type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "uri", ":ref:`ref_string`", "", ""
+ "type", ":ref:`ref_flyteidl.core.SchemaType`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.StructuredDataset:
+
+StructuredDataset
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: StructuredDataset type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "uri", ":ref:`ref_string`", "", "String location uniquely identifying where the data is. Should start with the storage location (e.g. s3://, gs://, bq://, etc.)"
+ "metadata", ":ref:`ref_flyteidl.core.StructuredDatasetMetadata`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.StructuredDatasetMetadata:
+
+StructuredDatasetMetadata
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: StructuredDatasetMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "structured_dataset_type", ":ref:`ref_flyteidl.core.StructuredDatasetType`", "", "Bundle the type information along with the literal. This is here because StructuredDatasets can often be more defined at run time than at compile time. That is, at compile time you might only declare a task to return a pandas dataframe or a StructuredDataset, without any column information, but at run time, you might have that column information. flytekit python will copy this type information into the literal, from the type information, if not provided by the various plugins (encoders). Since this field is run time generated, it's not used for any type checking."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Union:
+
+Union
+------------------------------------------------------------------
+
+The runtime representation of a tagged union value. See `UnionType` for more details.
+
+
+
+.. csv-table:: Union type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "value", ":ref:`ref_flyteidl.core.Literal`", "", ""
+ "type", ":ref:`ref_flyteidl.core.LiteralType`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.UnionInfo:
+
+UnionInfo
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: UnionInfo type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "targetType", ":ref:`ref_flyteidl.core.LiteralType`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Void:
+
+Void
+------------------------------------------------------------------
+
+Used to denote a nil/null/None assignment to a scalar value. The underlying LiteralType for Void is intentionally
+undefined since it can be assigned to a scalar of any LiteralType.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/core/security.proto:
+
+flyteidl/core/security.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.Identity:
+
+Identity
+------------------------------------------------------------------
+
+Identity encapsulates the various security identities a task can run as. It's up to the underlying plugin to pick the
+right identity for the execution environment.
+
+
+
+.. csv-table:: Identity type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "iam_role", ":ref:`ref_string`", "", "iam_role references the fully qualified name of Identity & Access Management role to impersonate."
+ "k8s_service_account", ":ref:`ref_string`", "", "k8s_service_account references a kubernetes service account to impersonate."
+ "oauth2_client", ":ref:`ref_flyteidl.core.OAuth2Client`", "", "oauth2_client references an oauth2 client. Backend plugins can use this information to impersonate the client when making external calls."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.OAuth2Client:
+
+OAuth2Client
+------------------------------------------------------------------
+
+OAuth2Client encapsulates OAuth2 Client Credentials to be used when making calls on behalf of that task.
+
+
+
+.. csv-table:: OAuth2Client type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "client_id", ":ref:`ref_string`", "", "client_id is the public id for the client to use. The system will not perform any pre-auth validation that the secret requested matches the client_id indicated here. +required"
+ "client_secret", ":ref:`ref_flyteidl.core.Secret`", "", "client_secret is a reference to the secret used to authenticate the OAuth2 client. +required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.OAuth2TokenRequest:
+
+OAuth2TokenRequest
+------------------------------------------------------------------
+
+OAuth2TokenRequest encapsulates information needed to request an OAuth2 token.
+FLYTE_TOKENS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if
+tokens are passed through environment variables.
+FLYTE_TOKENS_PATH_PREFIX will be passed to indicate the prefix of the path where secrets will be mounted if tokens
+are passed through file mounts.
+
+
+
+.. csv-table:: OAuth2TokenRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "name", ":ref:`ref_string`", "", "name indicates a unique id for the token request within this task token requests. It'll be used as a suffix for environment variables and as a filename for mounting tokens as files. +required"
+ "type", ":ref:`ref_flyteidl.core.OAuth2TokenRequest.Type`", "", "type indicates the type of the request to make. Defaults to CLIENT_CREDENTIALS. +required"
+ "client", ":ref:`ref_flyteidl.core.OAuth2Client`", "", "client references the client_id/secret to use to request the OAuth2 token. +required"
+ "idp_discovery_endpoint", ":ref:`ref_string`", "", "idp_discovery_endpoint references the discovery endpoint used to retrieve token endpoint and other related information. +optional"
+ "token_endpoint", ":ref:`ref_string`", "", "token_endpoint references the token issuance endpoint. If idp_discovery_endpoint is not provided, this parameter is mandatory. +optional"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Secret:
+
+Secret
+------------------------------------------------------------------
+
+Secret encapsulates information about the secret a task needs to proceed. An environment variable
+FLYTE_SECRETS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if
+secrets are passed through environment variables.
+FLYTE_SECRETS_DEFAULT_DIR will be passed to indicate the prefix of the path where secrets will be mounted if secrets
+are passed through file mounts.
+
+
+
+.. csv-table:: Secret type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "group", ":ref:`ref_string`", "", "The name of the secret group where to find the key referenced below. For K8s secrets, this should be the name of the v1/secret object. For Confidant, this should be the Credential name. For Vault, this should be the secret name. For AWS Secret Manager, this should be the name of the secret. +required"
+ "group_version", ":ref:`ref_string`", "", "The group version to fetch. This is not supported in all secret management systems. It'll be ignored for the ones that do not support it. +optional"
+ "key", ":ref:`ref_string`", "", "The name of the secret to mount. This has to match an existing secret in the system. It's up to the implementation of the secret management system to require case sensitivity. For K8s secrets, Confidant and Vault, this should match one of the keys inside the secret. For AWS Secret Manager, it's ignored. +optional"
+ "mount_requirement", ":ref:`ref_flyteidl.core.Secret.MountType`", "", "mount_requirement is optional. Indicates where the secret has to be mounted. If provided, the execution will fail if the underlying key management system cannot satisfy that requirement. If not provided, the default location will depend on the key management system. +optional"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.SecurityContext:
+
+SecurityContext
+------------------------------------------------------------------
+
+SecurityContext holds security attributes that apply to tasks.
+
+
+
+.. csv-table:: SecurityContext type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "run_as", ":ref:`ref_flyteidl.core.Identity`", "", "run_as encapsulates the identity a pod should run as. If the task fills in multiple fields here, it'll be up to the backend plugin to choose the appropriate identity for the execution engine the task will run on."
+ "secrets", ":ref:`ref_flyteidl.core.Secret`", "repeated", "secrets indicate the list of secrets the task needs in order to proceed. Secrets will be mounted/passed to the pod as it starts. If the plugin responsible for kicking of the task will not run it on a flyte cluster (e.g. AWS Batch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access to the secret) and to pass it to the remote execution engine."
+ "tokens", ":ref:`ref_flyteidl.core.OAuth2TokenRequest`", "repeated", "tokens indicate the list of token requests the task needs in order to proceed. Tokens will be mounted/passed to the pod as it starts. If the plugin responsible for kicking of the task will not run it on a flyte cluster (e.g. AWS Batch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access to the secret) and to pass it to the remote execution engine."
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.OAuth2TokenRequest.Type:
+
+OAuth2TokenRequest.Type
+------------------------------------------------------------------
+
+Type of the token requested.
+
+.. csv-table:: Enum OAuth2TokenRequest.Type values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "CLIENT_CREDENTIALS", "0", "CLIENT_CREDENTIALS indicates a 2-legged OAuth token requested using client credentials."
+
+
+
+.. _ref_flyteidl.core.Secret.MountType:
+
+Secret.MountType
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum Secret.MountType values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "ANY", "0", "Default case, indicates the client can tolerate either mounting options."
+ "ENV_VAR", "1", "ENV_VAR indicates the secret needs to be mounted as an environment variable."
+ "FILE", "2", "FILE indicates the secret needs to be mounted as a file."
+
+
+
+
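+
+A sketch of a task security context that runs as a Kubernetes service account and mounts one
+secret as an environment variable (assuming ``flyteidl.core.security_pb2``; the group, key and
+service account names are placeholders):
+
+.. code-block:: python
+
+ from flyteidl.core import security_pb2
+
+ db_password = security_pb2.Secret(
+     group="db-creds",   # e.g. the name of the v1/Secret object on K8s
+     key="password",     # one of the keys inside that secret
+     mount_requirement=security_pb2.Secret.ENV_VAR,
+ )
+
+ security_context = security_pb2.SecurityContext(
+     run_as=security_pb2.Identity(k8s_service_account="ml-pipeline"),
+     secrets=[db_password],
+ )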
+
+
+
+
+
+
+.. _ref_flyteidl/core/tasks.proto:
+
+flyteidl/core/tasks.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.Container:
+
+Container
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: Container type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "image", ":ref:`ref_string`", "", "Container image url. Eg: docker/redis:latest"
+ "command", ":ref:`ref_string`", "repeated", "Command to be executed, if not provided, the default entrypoint in the container image will be used."
+ "args", ":ref:`ref_string`", "repeated", "These will default to Flyte given paths. If provided, the system will not append known paths. If the task still needs flyte's inputs and outputs path, add $(FLYTE_INPUT_FILE), $(FLYTE_OUTPUT_FILE) wherever makes sense and the system will populate these before executing the container."
+ "resources", ":ref:`ref_flyteidl.core.Resources`", "", "Container resources requirement as specified by the container engine."
+ "env", ":ref:`ref_flyteidl.core.KeyValuePair`", "repeated", "Environment variables will be set as the container is starting up."
+ "config", ":ref:`ref_flyteidl.core.KeyValuePair`", "repeated", "**Deprecated.** Allows extra configs to be available for the container. TODO: elaborate on how configs will become available. Deprecated, please use TaskTemplate.config instead."
+ "ports", ":ref:`ref_flyteidl.core.ContainerPort`", "repeated", "Ports to open in the container. This feature is not supported by all execution engines. (e.g. supported on K8s but not supported on AWS Batch) Only K8s"
+ "data_config", ":ref:`ref_flyteidl.core.DataLoadingConfig`", "", "BETA: Optional configuration for DataLoading. If not specified, then default values are used. This makes it possible to to run a completely portable container, that uses inputs and outputs only from the local file-system and without having any reference to flyteidl. This is supported only on K8s at the moment. If data loading is enabled, then data will be mounted in accompanying directories specified in the DataLoadingConfig. If the directories are not specified, inputs will be mounted onto and outputs will be uploaded from a pre-determined file-system path. Refer to the documentation to understand the default paths. Only K8s"
+ "architecture", ":ref:`ref_flyteidl.core.Container.Architecture`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.ContainerPort:
+
+ContainerPort
+------------------------------------------------------------------
+
+Defines port properties for a container.
+
+
+
+.. csv-table:: ContainerPort type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "container_port", ":ref:`ref_uint32`", "", "Number of port to expose on the pod's IP address. This must be a valid port number, 0 < x < 65536."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.DataLoadingConfig:
+
+DataLoadingConfig
+------------------------------------------------------------------
+
+This configuration allows executing raw containers in Flyte using the Flyte CoPilot system.
+Flyte CoPilot eliminates the need for flytekit or an SDK inside the container. Any inputs required by the user's container are side-loaded into the input_path.
+Any outputs generated by the user container within the output_path are automatically uploaded.
+
+
+
+.. csv-table:: DataLoadingConfig type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "enabled", ":ref:`ref_bool`", "", "Flag enables DataLoading Config. If this is not set, data loading will not be used!"
+ "input_path", ":ref:`ref_string`", "", "File system path (start at root). This folder will contain all the inputs exploded to a separate file. Example, if the input interface needs (x: int, y: blob, z: multipart_blob) and the input path is "/var/flyte/inputs", then the file system will look like /var/flyte/inputs/inputs.<metadata format dependent -> .pb .json .yaml> -> Format as defined previously. The Blob and Multipart blob will reference local filesystem instead of remote locations /var/flyte/inputs/x -> X is a file that contains the value of x (integer) in string format /var/flyte/inputs/y -> Y is a file in Binary format /var/flyte/inputs/z/... -> Note Z itself is a directory More information about the protocol - refer to docs #TODO reference docs here"
+ "output_path", ":ref:`ref_string`", "", "File system path (start at root). This folder should contain all the outputs for the task as individual files and/or an error text file"
+ "format", ":ref:`ref_flyteidl.core.DataLoadingConfig.LiteralMapFormat`", "", "In the inputs folder, there will be an additional summary/metadata file that contains references to all files or inlined primitive values. This format decides the actual encoding for the data. Refer to the encoding to understand the specifics of the contents and the encoding"
+ "io_strategy", ":ref:`ref_flyteidl.core.IOStrategy`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.IOStrategy:
+
+IOStrategy
+------------------------------------------------------------------
+
+Strategy to use when dealing with Blob, Schema, or multipart blob data (large datasets)
+
+
+
+.. csv-table:: IOStrategy type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "download_mode", ":ref:`ref_flyteidl.core.IOStrategy.DownloadMode`", "", "Mode to use to manage downloads"
+ "upload_mode", ":ref:`ref_flyteidl.core.IOStrategy.UploadMode`", "", "Mode to use to manage uploads"
+
+
+
+
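+
+A sketch of a CoPilot data-loading configuration that downloads inputs eagerly and uploads
+outputs when the container exits (assuming ``flyteidl.core.tasks_pb2``):
+
+.. code-block:: python
+
+ from flyteidl.core import tasks_pb2
+
+ data_config = tasks_pb2.DataLoadingConfig(
+     enabled=True,
+     input_path="/var/flyte/inputs",    # inputs are exploded into this folder
+     output_path="/var/flyte/outputs",  # outputs are collected from this folder
+     format=tasks_pb2.DataLoadingConfig.JSON,
+     io_strategy=tasks_pb2.IOStrategy(
+         download_mode=tasks_pb2.IOStrategy.DOWNLOAD_EAGER,
+         upload_mode=tasks_pb2.IOStrategy.UPLOAD_ON_EXIT,
+     ),
+ )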
+
+
+
+.. _ref_flyteidl.core.K8sObjectMetadata:
+
+K8sObjectMetadata
+------------------------------------------------------------------
+
+Metadata for building a kubernetes object when a task is executed.
+
+
+
+.. csv-table:: K8sObjectMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "labels", ":ref:`ref_flyteidl.core.K8sObjectMetadata.LabelsEntry`", "repeated", "Optional labels to add to the pod definition."
+ "annotations", ":ref:`ref_flyteidl.core.K8sObjectMetadata.AnnotationsEntry`", "repeated", "Optional annotations to add to the pod definition."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.K8sObjectMetadata.AnnotationsEntry:
+
+K8sObjectMetadata.AnnotationsEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: K8sObjectMetadata.AnnotationsEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.K8sObjectMetadata.LabelsEntry:
+
+K8sObjectMetadata.LabelsEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: K8sObjectMetadata.LabelsEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.K8sPod:
+
+K8sPod
+------------------------------------------------------------------
+
+Defines a pod spec and additional pod metadata that is created when a task is executed.
+
+
+
+.. csv-table:: K8sPod type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "metadata", ":ref:`ref_flyteidl.core.K8sObjectMetadata`", "", "Contains additional metadata for building a kubernetes pod."
+ "pod_spec", ":ref:`ref_google.protobuf.Struct`", "", "Defines the primary pod spec created when a task is executed. This should be a JSON-marshalled pod spec, which can be defined in - go, using: https://github.com/kubernetes/api/blob/release-1.21/core/v1/types.go#L2936 - python: using https://github.com/kubernetes-client/python/blob/release-19.0/kubernetes/client/models/v1_pod_spec.py"
+
+
+
+
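+
+A sketch of attaching a JSON-marshalled pod spec and extra metadata (assuming
+``flyteidl.core.tasks_pb2``; the pod spec content, labels and annotations below are
+illustrative only):
+
+.. code-block:: python
+
+ from google.protobuf import struct_pb2
+ from flyteidl.core import tasks_pb2
+
+ # pod_spec is a google.protobuf.Struct holding the JSON form of a v1.PodSpec.
+ pod_spec = struct_pb2.Struct()
+ pod_spec.update(
+     {
+         "containers": [
+             {"name": "primary", "image": "ghcr.io/example/worker:latest"}
+         ]
+     }
+ )
+
+ pod = tasks_pb2.K8sPod(
+     metadata=tasks_pb2.K8sObjectMetadata(
+         labels={"team": "ml"},
+         annotations={"example.com/inject-sidecar": "false"},
+     ),
+     pod_spec=pod_spec,
+ )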
+
+
+
+.. _ref_flyteidl.core.Resources:
+
+Resources
+------------------------------------------------------------------
+
+A customizable interface to convey resources requested for a container. This can be interpreted differently for different
+container engines.
+
+
+
+.. csv-table:: Resources type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "requests", ":ref:`ref_flyteidl.core.Resources.ResourceEntry`", "repeated", "The desired set of resources requested. ResourceNames must be unique within the list."
+ "limits", ":ref:`ref_flyteidl.core.Resources.ResourceEntry`", "repeated", "Defines a set of bounds (e.g. min/max) within which the task can reliably run. ResourceNames must be unique within the list."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Resources.ResourceEntry:
+
+Resources.ResourceEntry
+------------------------------------------------------------------
+
+Encapsulates a resource name and value.
+
+
+
+.. csv-table:: Resources.ResourceEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "name", ":ref:`ref_flyteidl.core.Resources.ResourceName`", "", "Resource name."
+ "value", ":ref:`ref_string`", "", "Value must be a valid k8s quantity. See https://github.com/kubernetes/apimachinery/blob/master/pkg/api/resource/quantity.go#L30-L80"
+
+
+
+
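+
+A sketch of requesting half a CPU and 1Gi of memory with a 2Gi memory limit (assuming
+``flyteidl.core.tasks_pb2``; values must be valid k8s quantities):
+
+.. code-block:: python
+
+ from flyteidl.core import tasks_pb2
+
+ resources = tasks_pb2.Resources(
+     requests=[
+         tasks_pb2.Resources.ResourceEntry(name=tasks_pb2.Resources.CPU, value="500m"),
+         tasks_pb2.Resources.ResourceEntry(name=tasks_pb2.Resources.MEMORY, value="1Gi"),
+     ],
+     limits=[
+         tasks_pb2.Resources.ResourceEntry(name=tasks_pb2.Resources.MEMORY, value="2Gi"),
+     ],
+ )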
+
+
+
+.. _ref_flyteidl.core.RuntimeMetadata:
+
+RuntimeMetadata
+------------------------------------------------------------------
+
+Runtime information. This is loosely defined to allow for extensibility.
+
+
+
+.. csv-table:: RuntimeMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "type", ":ref:`ref_flyteidl.core.RuntimeMetadata.RuntimeType`", "", "Type of runtime."
+ "version", ":ref:`ref_string`", "", "Version of the runtime. All versions should be backward compatible. However, certain cases call for version checks to ensure tighter validation or setting expectations."
+ "flavor", ":ref:`ref_string`", "", "+optional It can be used to provide extra information about the runtime (e.g. python, golang... etc.)."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Sql:
+
+Sql
+------------------------------------------------------------------
+
+Sql represents a generic sql workload with a statement and dialect.
+
+
+
+.. csv-table:: Sql type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "statement", ":ref:`ref_string`", "", "The actual query to run, the query can have templated parameters. We use Flyte's Golang templating format for Query templating. Refer to the templating documentation. https://docs.flyte.org/projects/cookbook/en/latest/auto/integrations/external_services/hive/hive.html#sphx-glr-auto-integrations-external-services-hive-hive-py For example, insert overwrite directory '{{ .rawOutputDataPrefix }}' stored as parquet select * from my_table where ds = '{{ .Inputs.ds }}'"
+ "dialect", ":ref:`ref_flyteidl.core.Sql.Dialect`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.TaskMetadata:
+
+TaskMetadata
+------------------------------------------------------------------
+
+Task Metadata
+
+
+
+.. csv-table:: TaskMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "discoverable", ":ref:`ref_bool`", "", "Indicates whether the system should attempt to lookup this task's output to avoid duplication of work."
+ "runtime", ":ref:`ref_flyteidl.core.RuntimeMetadata`", "", "Runtime information about the task."
+ "timeout", ":ref:`ref_google.protobuf.Duration`", "", "The overall timeout of a task including user-triggered retries."
+ "retries", ":ref:`ref_flyteidl.core.RetryStrategy`", "", "Number of retries per task."
+ "discovery_version", ":ref:`ref_string`", "", "Indicates a logical version to apply to this task for the purpose of discovery."
+ "deprecated_error_message", ":ref:`ref_string`", "", "If set, this indicates that this task is deprecated. This will enable owners of tasks to notify consumers of the ending of support for a given task."
+ "interruptible", ":ref:`ref_bool`", "", ""
+ "cache_serializable", ":ref:`ref_bool`", "", "Indicates whether the system should attempt to execute discoverable instances in serial to avoid duplicate work"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.TaskTemplate:
+
+TaskTemplate
+------------------------------------------------------------------
+
+A Task structure that uniquely identifies a task in the system.
+Tasks are registered as a first step in the system.
+
+
+
+.. csv-table:: TaskTemplate type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Auto generated taskId by the system. Task Id uniquely identifies this task globally."
+ "type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier. This can be used to customize any of the components. If no extensions are provided in the system, Flyte will resolve the this task to its TaskCategory and default the implementation registered for the TaskCategory."
+ "metadata", ":ref:`ref_flyteidl.core.TaskMetadata`", "", "Extra metadata about the task."
+ "interface", ":ref:`ref_flyteidl.core.TypedInterface`", "", "A strongly typed interface for the task. This enables others to use this task within a workflow and guarantees compile-time validation of the workflow to avoid costly runtime failures."
+ "custom", ":ref:`ref_google.protobuf.Struct`", "", "Custom data about the task. This is extensible to allow various plugins in the system."
+ "container", ":ref:`ref_flyteidl.core.Container`", "", ""
+ "k8s_pod", ":ref:`ref_flyteidl.core.K8sPod`", "", ""
+ "sql", ":ref:`ref_flyteidl.core.Sql`", "", ""
+ "task_type_version", ":ref:`ref_int32`", "", "This can be used to customize task handling at execution time for the same task type."
+ "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "security_context encapsulates security attributes requested to run this task."
+ "config", ":ref:`ref_flyteidl.core.TaskTemplate.ConfigEntry`", "repeated", "Metadata about the custom defined for this task. This is extensible to allow various plugins in the system to use as required. reserve the field numbers 1 through 15 for very frequently occurring message elements"
+
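+Continuing the sketch above, a complete TaskTemplate for an SQL task might look like
+this (the identifier values, task type and config keys are illustrative, not required
+values):
+
+    from google.protobuf import struct_pb2
+    from flyteidl.core import identifier_pb2, interface_pb2, tasks_pb2
+
+    template = tasks_pb2.TaskTemplate(
+        id=identifier_pb2.Identifier(
+            resource_type=identifier_pb2.TASK,
+            project="flytesnacks",
+            domain="development",
+            name="my_hive_task",
+            version="v1",
+        ),
+        type="hive",                   # resolved to a plugin/TaskCategory at execution time
+        metadata=metadata,             # the TaskMetadata built in the previous sketch
+        interface=interface_pb2.TypedInterface(),
+        custom=struct_pb2.Struct(),
+        sql=tasks_pb2.Sql(
+            statement="select * from my_table where ds = '{{ .Inputs.ds }}'",
+            dialect=tasks_pb2.Sql.HIVE,
+        ),
+        config={"cluster": "primary"},  # free-form key/value configuration
+    )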
+
+
+
+
+
+
+.. _ref_flyteidl.core.TaskTemplate.ConfigEntry:
+
+TaskTemplate.ConfigEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: TaskTemplate.ConfigEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Container.Architecture:
+
+Container.Architecture
+------------------------------------------------------------------
+
+Architecture-type the container image supports.
+
+.. csv-table:: Enum Container.Architecture values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "UNKNOWN", "0", ""
+ "AMD64", "1", ""
+ "ARM64", "2", ""
+ "ARM_V6", "3", ""
+ "ARM_V7", "4", ""
+
+
+
+.. _ref_flyteidl.core.DataLoadingConfig.LiteralMapFormat:
+
+DataLoadingConfig.LiteralMapFormat
+------------------------------------------------------------------
+
+LiteralMapFormat decides the encoding format in which the input metadata should be made available to the containers.
+If the user has access to the protocol buffer definitions, it is recommended to use the PROTO format.
+JSON and YAML do not need any protobuf definitions to read them.
+All remote references in core.LiteralMap are replaced with local filesystem references (the data is downloaded to local filesystem)
+
+.. csv-table:: Enum DataLoadingConfig.LiteralMapFormat values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "JSON", "0", "JSON / YAML for the metadata (which contains inlined primitive values). The representation is inline with the standard json specification as specified - https://www.json.org/json-en.html"
+ "YAML", "1", ""
+ "PROTO", "2", "Proto is a serialized binary of `core.LiteralMap` defined in flyteidl/core"
+
+
+
+.. _ref_flyteidl.core.IOStrategy.DownloadMode:
+
+IOStrategy.DownloadMode
+------------------------------------------------------------------
+
+Mode to use for downloading
+
+.. csv-table:: Enum IOStrategy.DownloadMode values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "DOWNLOAD_EAGER", "0", "All data will be downloaded before the main container is executed"
+ "DOWNLOAD_STREAM", "1", "Data will be downloaded as a stream and an End-Of-Stream marker will be written to indicate all data has been downloaded. Refer to protocol for details"
+ "DO_NOT_DOWNLOAD", "2", "Large objects (offloaded) will not be downloaded"
+
+
+
+.. _ref_flyteidl.core.IOStrategy.UploadMode:
+
+IOStrategy.UploadMode
+------------------------------------------------------------------
+
+Mode to use for uploading
+
+.. csv-table:: Enum IOStrategy.UploadMode values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "UPLOAD_ON_EXIT", "0", "All data will be uploaded after the main container exits"
+ "UPLOAD_EAGER", "1", "Data will be uploaded as it appears. Refer to protocol specification for details"
+ "DO_NOT_UPLOAD", "2", "Data will not be uploaded, only references will be written"
+
+
+
+.. _ref_flyteidl.core.Resources.ResourceName:
+
+Resources.ResourceName
+------------------------------------------------------------------
+
+Known resource names.
+
+.. csv-table:: Enum Resources.ResourceName values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "UNKNOWN", "0", ""
+ "CPU", "1", ""
+ "GPU", "2", ""
+ "MEMORY", "3", ""
+ "STORAGE", "4", ""
+ "EPHEMERAL_STORAGE", "5", "For Kubernetes-based deployments, pods use ephemeral local storage for scratch space, caching, and for logs."
+
+
+
+.. _ref_flyteidl.core.RuntimeMetadata.RuntimeType:
+
+RuntimeMetadata.RuntimeType
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum RuntimeMetadata.RuntimeType values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "OTHER", "0", ""
+ "FLYTE_SDK", "1", ""
+
+
+
+.. _ref_flyteidl.core.Sql.Dialect:
+
+Sql.Dialect
+------------------------------------------------------------------
+
+The dialect of the SQL statement. This is used to validate and parse SQL statements at compilation time to avoid
+expensive runtime operations. If set to an unsupported dialect, no validation will be done on the statement.
+We support the following dialects: ansi, hive.
+
+.. csv-table:: Enum Sql.Dialect values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "UNDEFINED", "0", ""
+ "ANSI", "1", ""
+ "HIVE", "2", ""
+ "OTHER", "3", ""
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/core/types.proto:
+
+flyteidl/core/types.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.BlobType:
+
+BlobType
+------------------------------------------------------------------
+
+Defines type behavior for blob objects
+
+
+
+.. csv-table:: BlobType type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "format", ":ref:`ref_string`", "", "Format can be a free form string understood by SDK/UI etc like csv, parquet etc"
+ "dimensionality", ":ref:`ref_flyteidl.core.BlobType.BlobDimensionality`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.EnumType:
+
+EnumType
+------------------------------------------------------------------
+
+Enables declaring enum types, with predefined string values
+For len(values) > 0, the first value in the ordered list is regarded as the default value. If you wish
+to provide no default, make the first value undefined.
+
+
+
+.. csv-table:: EnumType type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "values", ":ref:`ref_string`", "repeated", "Predefined set of enum values."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.Error:
+
+Error
+------------------------------------------------------------------
+
+Represents an error thrown from a node.
+
+
+
+.. csv-table:: Error type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "failed_node_id", ":ref:`ref_string`", "", "The node id that threw the error."
+ "message", ":ref:`ref_string`", "", "Error message thrown."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.LiteralType:
+
+LiteralType
+------------------------------------------------------------------
+
+Defines a strong type to allow type checking between interfaces.
+
+
+
+.. csv-table:: LiteralType type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "simple", ":ref:`ref_flyteidl.core.SimpleType`", "", "A simple type that can be compared one-to-one with another."
+ "schema", ":ref:`ref_flyteidl.core.SchemaType`", "", "A complex type that requires matching of inner fields."
+ "collection_type", ":ref:`ref_flyteidl.core.LiteralType`", "", "Defines the type of the value of a collection. Only homogeneous collections are allowed."
+ "map_value_type", ":ref:`ref_flyteidl.core.LiteralType`", "", "Defines the type of the value of a map type. The type of the key is always a string."
+ "blob", ":ref:`ref_flyteidl.core.BlobType`", "", "A blob might have specialized implementation details depending on associated metadata."
+ "enum_type", ":ref:`ref_flyteidl.core.EnumType`", "", "Defines an enum with pre-defined string values."
+ "structured_dataset_type", ":ref:`ref_flyteidl.core.StructuredDatasetType`", "", "Generalized schema support"
+ "union_type", ":ref:`ref_flyteidl.core.UnionType`", "", "Defines an union type with pre-defined LiteralTypes."
+ "metadata", ":ref:`ref_google.protobuf.Struct`", "", "This field contains type metadata that is descriptive of the type, but is NOT considered in type-checking. This might be used by consumers to identify special behavior or display extended information for the type."
+ "annotation", ":ref:`ref_flyteidl.core.TypeAnnotation`", "", "This field contains arbitrary data that might have special semantic meaning for the client but does not effect internal flyte behavior."
+ "structure", ":ref:`ref_flyteidl.core.TypeStructure`", "", "Hints to improve type matching."
+
+
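+A minimal sketch of how these one-of fields compose (assuming the generated Python
+bindings in `flyteidl.core.types_pb2`):
+
+    from flyteidl.core import types_pb2
+
+    # A plain string.
+    string_type = types_pb2.LiteralType(simple=types_pb2.SimpleType.STRING)
+
+    # A homogeneous list of strings: the element type is itself a LiteralType.
+    list_of_strings = types_pb2.LiteralType(collection_type=string_type)
+
+    # A map whose values are integers; map keys are always strings.
+    map_of_integers = types_pb2.LiteralType(
+        map_value_type=types_pb2.LiteralType(simple=types_pb2.SimpleType.INTEGER)
+    )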
+
+
+
+
+
+.. _ref_flyteidl.core.OutputReference:
+
+OutputReference
+------------------------------------------------------------------
+
+A reference to an output produced by a node. The type can be retrieved -and validated- from
+the underlying interface of the node.
+
+
+
+.. csv-table:: OutputReference type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "node_id", ":ref:`ref_string`", "", "Node id must exist at the graph layer."
+ "var", ":ref:`ref_string`", "", "Variable name must refer to an output variable for the node."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.SchemaType:
+
+SchemaType
+------------------------------------------------------------------
+
+Defines schema columns and types to strongly type-validate schemas interoperability.
+
+
+
+.. csv-table:: SchemaType type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "columns", ":ref:`ref_flyteidl.core.SchemaType.SchemaColumn`", "repeated", "A list of ordered columns this schema comprises of."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.SchemaType.SchemaColumn:
+
+SchemaType.SchemaColumn
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: SchemaType.SchemaColumn type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "name", ":ref:`ref_string`", "", "A unique name -within the schema type- for the column"
+ "type", ":ref:`ref_flyteidl.core.SchemaType.SchemaColumn.SchemaColumnType`", "", "The column type. This allows a limited set of types currently."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.StructuredDatasetType:
+
+StructuredDatasetType
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: StructuredDatasetType type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "columns", ":ref:`ref_flyteidl.core.StructuredDatasetType.DatasetColumn`", "repeated", "A list of ordered columns this schema comprises of."
+ "format", ":ref:`ref_string`", "", "This is the storage format, the format of the bits at rest parquet, feather, csv, etc. For two types to be compatible, the format will need to be an exact match."
+ "external_schema_type", ":ref:`ref_string`", "", "This is a string representing the type that the bytes in external_schema_bytes are formatted in. This is an optional field that will not be used for type checking."
+ "external_schema_bytes", ":ref:`ref_bytes`", "", "The serialized bytes of a third-party schema library like Arrow. This is an optional field that will not be used for type checking."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.StructuredDatasetType.DatasetColumn:
+
+StructuredDatasetType.DatasetColumn
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: StructuredDatasetType.DatasetColumn type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "name", ":ref:`ref_string`", "", "A unique name within the schema type for the column."
+ "literal_type", ":ref:`ref_flyteidl.core.LiteralType`", "", "The column type."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.TypeAnnotation:
+
+TypeAnnotation
+------------------------------------------------------------------
+
+TypeAnnotation encapsulates registration time information about a type. This can be used for various control-plane operations. TypeAnnotation will not be available at runtime when a task runs.
+
+
+
+.. csv-table:: TypeAnnotation type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "annotations", ":ref:`ref_google.protobuf.Struct`", "", "A arbitrary JSON payload to describe a type."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.TypeStructure:
+
+TypeStructure
+------------------------------------------------------------------
+
+Hints to improve type matching
+e.g. allows distinguishing output from custom type transformers
+even if the underlying IDL serialization matches.
+
+
+
+.. csv-table:: TypeStructure type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "tag", ":ref:`ref_string`", "", "Must exactly match for types to be castable"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.UnionType:
+
+UnionType
+------------------------------------------------------------------
+
+Defines a tagged union type, also known as a variant (and formally as the sum type).
+
+A sum type S is defined by a sequence of types (A, B, C, ...), each tagged by a string tag.
+A value of type S is constructed from a value of any of the variant types. The specific choice of type is recorded by
+storing the variant's tag with the literal value and can be examined at runtime.
+
+Type S is typically written as
+S := Apple A | Banana B | Cantaloupe C | ...
+
+Notably, a nullable (optional) type is a sum type between some type X and the singleton type representing a null-value:
+Optional X := X | Null
+
+See also: https://en.wikipedia.org/wiki/Tagged_union
+
+
+
+.. csv-table:: UnionType type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "variants", ":ref:`ref_flyteidl.core.LiteralType`", "repeated", "Predefined set of variants in union."
+
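+For example, the nullable (optional) case described above can be sketched with the
+generated Python bindings as a two-variant union (the tag strings are illustrative):
+
+    from flyteidl.core import types_pb2
+
+    # Optional string := string | None
+    optional_string = types_pb2.LiteralType(
+        union_type=types_pb2.UnionType(
+            variants=[
+                types_pb2.LiteralType(
+                    simple=types_pb2.SimpleType.STRING,
+                    structure=types_pb2.TypeStructure(tag="str"),
+                ),
+                types_pb2.LiteralType(
+                    simple=types_pb2.SimpleType.NONE,
+                    structure=types_pb2.TypeStructure(tag="none"),
+                ),
+            ]
+        )
+    )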
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.BlobType.BlobDimensionality:
+
+BlobType.BlobDimensionality
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum BlobType.BlobDimensionality values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "SINGLE", "0", ""
+ "MULTIPART", "1", ""
+
+
+
+.. _ref_flyteidl.core.SchemaType.SchemaColumn.SchemaColumnType:
+
+SchemaType.SchemaColumn.SchemaColumnType
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum SchemaType.SchemaColumn.SchemaColumnType values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "INTEGER", "0", ""
+ "FLOAT", "1", ""
+ "STRING", "2", ""
+ "BOOLEAN", "3", ""
+ "DATETIME", "4", ""
+ "DURATION", "5", ""
+
+
+
+.. _ref_flyteidl.core.SimpleType:
+
+SimpleType
+------------------------------------------------------------------
+
+Define a set of simple types.
+
+.. csv-table:: Enum SimpleType values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "NONE", "0", ""
+ "INTEGER", "1", ""
+ "FLOAT", "2", ""
+ "STRING", "3", ""
+ "BOOLEAN", "4", ""
+ "DATETIME", "5", ""
+ "DURATION", "6", ""
+ "BINARY", "7", ""
+ "ERROR", "8", ""
+ "STRUCT", "9", ""
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/core/workflow.proto:
+
+flyteidl/core/workflow.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.Alias:
+
+Alias
+------------------------------------------------------------------
+
+Links a variable to an alias.
+
+
+
+.. csv-table:: Alias type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "var", ":ref:`ref_string`", "", "Must match one of the output variable names on a node."
+ "alias", ":ref:`ref_string`", "", "A workflow-level unique alias that downstream nodes can refer to in their input."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.BranchNode:
+
+BranchNode
+------------------------------------------------------------------
+
+BranchNode is a special node that alters the flow of the workflow graph. It allows the control flow to branch at
+runtime based on a series of conditions that get evaluated on various parameters (e.g. inputs, primitives).
+
+
+
+.. csv-table:: BranchNode type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "if_else", ":ref:`ref_flyteidl.core.IfElseBlock`", "", "+required"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.IfBlock:
+
+IfBlock
+------------------------------------------------------------------
+
+Defines a condition and the execution unit that should be executed if the condition is satisfied.
+
+
+
+.. csv-table:: IfBlock type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "condition", ":ref:`ref_flyteidl.core.BooleanExpression`", "", ""
+ "then_node", ":ref:`ref_flyteidl.core.Node`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.IfElseBlock:
+
+IfElseBlock
+------------------------------------------------------------------
+
+Defines a series of if/else blocks. The first branch whose condition evaluates to true is the one to execute.
+If no conditions were satisfied, the else_node or the error will execute.
+
+
+
+.. csv-table:: IfElseBlock type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "case", ":ref:`ref_flyteidl.core.IfBlock`", "", "+required. First condition to evaluate."
+ "other", ":ref:`ref_flyteidl.core.IfBlock`", "repeated", "+optional. Additional branches to evaluate."
+ "else_node", ":ref:`ref_flyteidl.core.Node`", "", "The node to execute in case none of the branches were taken."
+ "error", ":ref:`ref_flyteidl.core.Error`", "", "An error to throw in case none of the branches were taken."
+
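+A rough sketch of a two-way branch built from these messages (assuming the generated
+Python bindings; the BooleanExpression comes from flyteidl/core/condition.proto, and the
+node ids, the `score` variable and the 0.9 threshold are illustrative):
+
+    from flyteidl.core import condition_pb2, literals_pb2, workflow_pb2
+
+    # Take the first branch when the input variable `score` is greater than 0.9.
+    condition = condition_pb2.BooleanExpression(
+        comparison=condition_pb2.ComparisonExpression(
+            operator=condition_pb2.ComparisonExpression.GT,
+            left_value=condition_pb2.Operand(var="score"),
+            right_value=condition_pb2.Operand(
+                primitive=literals_pb2.Primitive(float_value=0.9)
+            ),
+        )
+    )
+
+    branch = workflow_pb2.BranchNode(
+        if_else=workflow_pb2.IfElseBlock(
+            case=workflow_pb2.IfBlock(
+                condition=condition,
+                then_node=workflow_pb2.Node(id="n-accept"),
+            ),
+            else_node=workflow_pb2.Node(id="n-reject"),
+        )
+    )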
+
+
+
+
+
+
+.. _ref_flyteidl.core.Node:
+
+Node
+------------------------------------------------------------------
+
+A Workflow graph Node. One unit of execution in the graph. Each node can be linked to a Task, a Workflow or a branch
+node.
+
+
+
+.. csv-table:: Node type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_string`", "", "A workflow-level unique identifier that identifies this node in the workflow. "inputs" and "outputs" are reserved node ids that cannot be used by other nodes."
+ "metadata", ":ref:`ref_flyteidl.core.NodeMetadata`", "", "Extra metadata about the node."
+ "inputs", ":ref:`ref_flyteidl.core.Binding`", "repeated", "Specifies how to bind the underlying interface's inputs. All required inputs specified in the underlying interface must be fulfilled."
+ "upstream_node_ids", ":ref:`ref_string`", "repeated", "+optional Specifies execution dependency for this node ensuring it will only get scheduled to run after all its upstream nodes have completed. This node will have an implicit dependency on any node that appears in inputs field."
+ "output_aliases", ":ref:`ref_flyteidl.core.Alias`", "repeated", "+optional. A node can define aliases for a subset of its outputs. This is particularly useful if different nodes need to conform to the same interface (e.g. all branches in a branch node). Downstream nodes must refer to this nodes outputs using the alias if one's specified."
+ "task_node", ":ref:`ref_flyteidl.core.TaskNode`", "", "Information about the Task to execute in this node."
+ "workflow_node", ":ref:`ref_flyteidl.core.WorkflowNode`", "", "Information about the Workflow to execute in this mode."
+ "branch_node", ":ref:`ref_flyteidl.core.BranchNode`", "", "Information about the branch node to evaluate in this node."
+
+
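+A minimal sketch of a node that runs a registered task and waits for an upstream node
+(identifier and node id values are illustrative):
+
+    from flyteidl.core import identifier_pb2, workflow_pb2
+
+    node = workflow_pb2.Node(
+        id="n1",                     # unique within the workflow; 'inputs'/'outputs' are reserved
+        upstream_node_ids=["n0"],    # only scheduled after node "n0" completes
+        task_node=workflow_pb2.TaskNode(
+            reference_id=identifier_pb2.Identifier(
+                resource_type=identifier_pb2.TASK,
+                project="flytesnacks",
+                domain="development",
+                name="my_task",
+                version="v1",
+            )
+        ),
+    )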
+
+
+
+
+
+.. _ref_flyteidl.core.NodeMetadata:
+
+NodeMetadata
+------------------------------------------------------------------
+
+Defines extra information about the Node.
+
+
+
+.. csv-table:: NodeMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "name", ":ref:`ref_string`", "", "A friendly name for the Node"
+ "timeout", ":ref:`ref_google.protobuf.Duration`", "", "The overall timeout of a task."
+ "retries", ":ref:`ref_flyteidl.core.RetryStrategy`", "", "Number of retries per task."
+ "interruptible", ":ref:`ref_bool`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.TaskNode:
+
+TaskNode
+------------------------------------------------------------------
+
+Refers to the task that the Node is to execute.
+
+
+
+.. csv-table:: TaskNode type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "reference_id", ":ref:`ref_flyteidl.core.Identifier`", "", "A globally unique identifier for the task."
+ "overrides", ":ref:`ref_flyteidl.core.TaskNodeOverrides`", "", "Optional overrides applied at task execution time."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.TaskNodeOverrides:
+
+TaskNodeOverrides
+------------------------------------------------------------------
+
+Optional task node overrides that will be applied at task execution time.
+
+
+
+.. csv-table:: TaskNodeOverrides type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "resources", ":ref:`ref_flyteidl.core.Resources`", "", "A customizable interface to convey resources requested for a task container."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.WorkflowMetadata:
+
+WorkflowMetadata
+------------------------------------------------------------------
+
+This is workflow layer metadata. These settings are only applicable to the workflow as a whole, and do not
+percolate down to child entities (like tasks) launched by the workflow.
+
+
+
+.. csv-table:: WorkflowMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "Indicates the runtime priority of workflow executions."
+ "on_failure", ":ref:`ref_flyteidl.core.WorkflowMetadata.OnFailurePolicy`", "", "Defines how the system should behave when a failure is detected in the workflow execution."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.WorkflowMetadataDefaults:
+
+WorkflowMetadataDefaults
+------------------------------------------------------------------
+
+The difference between these settings and the WorkflowMetadata ones is that these are meant to be passed down to
+a workflow's underlying entities (like tasks). For instance, 'interruptible' has no meaning at the workflow layer, it
+is only relevant when a task executes. The settings here are the defaults that are passed to all nodes
+unless explicitly overridden at the node layer.
+If you are adding a setting that applies to both the Workflow itself, and everything underneath it, it should be
+added to both this object and the WorkflowMetadata object above.
+
+
+
+.. csv-table:: WorkflowMetadataDefaults type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "interruptible", ":ref:`ref_bool`", "", "Whether child nodes of the workflow are interruptible."
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.WorkflowNode:
+
+WorkflowNode
+------------------------------------------------------------------
+
+Refers to the workflow the node is to execute.
+
+
+
+.. csv-table:: WorkflowNode type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "launchplan_ref", ":ref:`ref_flyteidl.core.Identifier`", "", "A globally unique identifier for the launch plan."
+ "sub_workflow_ref", ":ref:`ref_flyteidl.core.Identifier`", "", "Reference to a subworkflow, that should be defined with the compiler context"
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.WorkflowTemplate:
+
+WorkflowTemplate
+------------------------------------------------------------------
+
+Flyte Workflow Structure that encapsulates task, branch and subworkflow nodes to form a statically analyzable,
+directed acyclic graph.
+
+
+
+.. csv-table:: WorkflowTemplate type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.Identifier`", "", "A globally unique identifier for the workflow."
+ "metadata", ":ref:`ref_flyteidl.core.WorkflowMetadata`", "", "Extra metadata about the workflow."
+ "interface", ":ref:`ref_flyteidl.core.TypedInterface`", "", "Defines a strongly typed interface for the Workflow. This can include some optional parameters."
+ "nodes", ":ref:`ref_flyteidl.core.Node`", "repeated", "A list of nodes. In addition, "globals" is a special reserved node id that can be used to consume workflow inputs."
+ "outputs", ":ref:`ref_flyteidl.core.Binding`", "repeated", "A list of output bindings that specify how to construct workflow outputs. Bindings can pull node outputs or specify literals. All workflow outputs specified in the interface field must be bound in order for the workflow to be validated. A workflow has an implicit dependency on all of its nodes to execute successfully in order to bind final outputs. Most of these outputs will be Binding's with a BindingData of type OutputReference. That is, your workflow can just have an output of some constant (`Output(5)`), but usually, the workflow will be pulling outputs from the output of a task."
+ "failure_node", ":ref:`ref_flyteidl.core.Node`", "", "+optional A catch-all node. This node is executed whenever the execution engine determines the workflow has failed. The interface of this node must match the Workflow interface with an additional input named "error" of type pb.lyft.flyte.core.Error."
+ "metadata_defaults", ":ref:`ref_flyteidl.core.WorkflowMetadataDefaults`", "", "workflow defaults"
+
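+Putting the pieces together, a one-node workflow whose single output is bound to that
+node's output could be sketched as follows (identifier values and variable names are
+illustrative):
+
+    from flyteidl.core import (
+        identifier_pb2, interface_pb2, literals_pb2, types_pb2, workflow_pb2,
+    )
+
+    workflow = workflow_pb2.WorkflowTemplate(
+        id=identifier_pb2.Identifier(
+            resource_type=identifier_pb2.WORKFLOW,
+            project="flytesnacks",
+            domain="development",
+            name="my_workflow",
+            version="v1",
+        ),
+        metadata=workflow_pb2.WorkflowMetadata(
+            on_failure=workflow_pb2.WorkflowMetadata.FAIL_IMMEDIATELY,
+        ),
+        metadata_defaults=workflow_pb2.WorkflowMetadataDefaults(interruptible=False),
+        interface=interface_pb2.TypedInterface(),
+        nodes=[node],                # the Node built in the sketch above
+        outputs=[
+            literals_pb2.Binding(
+                var="result",
+                binding=literals_pb2.BindingData(
+                    promise=types_pb2.OutputReference(node_id="n1", var="out"),
+                ),
+            )
+        ],
+    )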
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.core.WorkflowMetadata.OnFailurePolicy:
+
+WorkflowMetadata.OnFailurePolicy
+------------------------------------------------------------------
+
+Failure Handling Strategy
+
+.. csv-table:: Enum WorkflowMetadata.OnFailurePolicy values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "FAIL_IMMEDIATELY", "0", "FAIL_IMMEDIATELY instructs the system to fail as soon as a node fails in the workflow. It'll automatically abort all currently running nodes and clean up resources before finally marking the workflow executions as failed."
+ "FAIL_AFTER_EXECUTABLE_NODES_COMPLETE", "1", "FAIL_AFTER_EXECUTABLE_NODES_COMPLETE instructs the system to make as much progress as it can. The system will not alter the dependencies of the execution graph so any node that depend on the failed node will not be run. Other nodes that will be executed to completion before cleaning up resources and marking the workflow execution as failed."
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/core/workflow_closure.proto:
+
+flyteidl/core/workflow_closure.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.core.WorkflowClosure:
+
+WorkflowClosure
+------------------------------------------------------------------
+
+Defines an enclosed package of workflow and tasks it references.
+
+
+
+.. csv-table:: WorkflowClosure type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "workflow", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "", "required. Workflow template."
+ "tasks", ":ref:`ref_flyteidl.core.TaskTemplate`", "repeated", "optional. A collection of tasks referenced by the workflow. Only needed if the workflow references tasks."
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_google/protobuf/timestamp.proto:
+
+google/protobuf/timestamp.proto
+==================================================================
+
+
+
+
+
+.. _ref_google.protobuf.Timestamp:
+
+Timestamp
+------------------------------------------------------------------
+
+A Timestamp represents a point in time independent of any time zone or local
+calendar, encoded as a count of seconds and fractions of seconds at
+nanosecond resolution. The count is relative to an epoch at UTC midnight on
+January 1, 1970, in the proleptic Gregorian calendar which extends the
+Gregorian calendar backwards to year one.
+
+All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
+second table is needed for interpretation, using a [24-hour linear
+smear](https://developers.google.com/time/smear).
+
+The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
+restricting to that range, we ensure that we can convert to and from [RFC
+3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
+
+# Examples
+
+Example 1: Compute Timestamp from POSIX `time()`.
+
+ Timestamp timestamp;
+ timestamp.set_seconds(time(NULL));
+ timestamp.set_nanos(0);
+
+Example 2: Compute Timestamp from POSIX `gettimeofday()`.
+
+ struct timeval tv;
+ gettimeofday(&tv, NULL);
+
+ Timestamp timestamp;
+ timestamp.set_seconds(tv.tv_sec);
+ timestamp.set_nanos(tv.tv_usec * 1000);
+
+Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
+
+ FILETIME ft;
+ GetSystemTimeAsFileTime(&ft);
+ UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
+
+ // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
+ // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
+ Timestamp timestamp;
+ timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
+ timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
+
+Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
+
+ long millis = System.currentTimeMillis();
+
+ Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
+ .setNanos((int) ((millis % 1000) * 1000000)).build();
+
+
+Example 5: Compute Timestamp from Java `Instant.now()`.
+
+ Instant now = Instant.now();
+
+ Timestamp timestamp =
+ Timestamp.newBuilder().setSeconds(now.getEpochSecond())
+ .setNanos(now.getNano()).build();
+
+
+Example 6: Compute Timestamp from current time in Python.
+
+ timestamp = Timestamp()
+ timestamp.GetCurrentTime()
+
+# JSON Mapping
+
+In JSON format, the Timestamp type is encoded as a string in the
+[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
+format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
+where {year} is always expressed using four digits while {month}, {day},
+{hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
+seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
+are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
+is required. A proto3 JSON serializer should always use UTC (as indicated by
+"Z") when printing the Timestamp type and a proto3 JSON parser should be
+able to accept both UTC and other timezones (as indicated by an offset).
+
+For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
+01:30 UTC on January 15, 2017.
+
+In JavaScript, one can convert a Date object to this format using the
+standard
+[toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
+method. In Python, a standard `datetime.datetime` object can be converted
+to this format using
+[`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
+the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
+the Joda Time's [`ISODateTimeFormat.dateTime()`](
+http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
+) to obtain a formatter capable of generating timestamps in this format.
+
+
+
+.. csv-table:: Timestamp type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "seconds", ":ref:`ref_int64`", "", "Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."
+ "nanos", ":ref:`ref_int32`", "", "Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive."
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_google/protobuf/duration.proto:
+
+google/protobuf/duration.proto
+==================================================================
+
+
+
+
+
+.. _ref_google.protobuf.Duration:
+
+Duration
+------------------------------------------------------------------
+
+A Duration represents a signed, fixed-length span of time represented
+as a count of seconds and fractions of seconds at nanosecond
+resolution. It is independent of any calendar and concepts like "day"
+or "month". It is related to Timestamp in that the difference between
+two Timestamp values is a Duration and it can be added or subtracted
+from a Timestamp. Range is approximately +-10,000 years.
+
+# Examples
+
+Example 1: Compute Duration from two Timestamps in pseudo code.
+
+ Timestamp start = ...;
+ Timestamp end = ...;
+ Duration duration = ...;
+
+ duration.seconds = end.seconds - start.seconds;
+ duration.nanos = end.nanos - start.nanos;
+
+ if (duration.seconds < 0 && duration.nanos > 0) {
+ duration.seconds += 1;
+ duration.nanos -= 1000000000;
+ } else if (duration.seconds > 0 && duration.nanos < 0) {
+ duration.seconds -= 1;
+ duration.nanos += 1000000000;
+ }
+
+Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+
+ Timestamp start = ...;
+ Duration duration = ...;
+ Timestamp end = ...;
+
+ end.seconds = start.seconds + duration.seconds;
+ end.nanos = start.nanos + duration.nanos;
+
+ if (end.nanos < 0) {
+ end.seconds -= 1;
+ end.nanos += 1000000000;
+ } else if (end.nanos >= 1000000000) {
+ end.seconds += 1;
+ end.nanos -= 1000000000;
+ }
+
+Example 3: Compute Duration from datetime.timedelta in Python.
+
+ td = datetime.timedelta(days=3, minutes=10)
+ duration = Duration()
+ duration.FromTimedelta(td)
+
+# JSON Mapping
+
+In JSON format, the Duration type is encoded as a string rather than an
+object, where the string ends in the suffix "s" (indicating seconds) and
+is preceded by the number of seconds, with nanoseconds expressed as
+fractional seconds. For example, 3 seconds with 0 nanoseconds should be
+encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
+be expressed in JSON format as "3.000000001s", and 3 seconds and 1
+microsecond should be expressed in JSON format as "3.000001s".
+
+
+
+.. csv-table:: Duration type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years"
+ "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive."
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_google/protobuf/struct.proto:
+
+google/protobuf/struct.proto
+==================================================================
+
+
+
+
+
+.. _ref_google.protobuf.ListValue:
+
+ListValue
+------------------------------------------------------------------
+
+`ListValue` is a wrapper around a repeated field of values.
+
+The JSON representation for `ListValue` is JSON array.
+
+
+
+.. csv-table:: ListValue type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "values", ":ref:`ref_google.protobuf.Value`", "repeated", "Repeated field of dynamically typed values."
+
+
+
+
+
+
+
+.. _ref_google.protobuf.Struct:
+
+Struct
+------------------------------------------------------------------
+
+`Struct` represents a structured data value, consisting of fields
+which map to dynamically typed values. In some languages, `Struct`
+might be supported by a native representation. For example, in
+scripting languages like JS a struct is represented as an
+object. The details of that representation are described together
+with the proto support for the language.
+
+The JSON representation for `Struct` is JSON object.
+
+
+
+.. csv-table:: Struct type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "fields", ":ref:`ref_google.protobuf.Struct.FieldsEntry`", "repeated", "Unordered map of dynamically typed values."
+
+
+
+
+
+
+
+.. _ref_google.protobuf.Struct.FieldsEntry:
+
+Struct.FieldsEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: Struct.FieldsEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_google.protobuf.Value`", "", ""
+
+
+
+
+
+
+
+.. _ref_google.protobuf.Value:
+
+Value
+------------------------------------------------------------------
+
+`Value` represents a dynamically typed value which can be either
+null, a number, a string, a boolean, a recursive struct value, or a
+list of values. A producer of value is expected to set one of these
+variants. Absence of any variant indicates an error.
+
+The JSON representation for `Value` is JSON value.
+
+
+
+.. csv-table:: Value type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "null_value", ":ref:`ref_google.protobuf.NullValue`", "", "Represents a null value."
+ "number_value", ":ref:`ref_double`", "", "Represents a double value."
+ "string_value", ":ref:`ref_string`", "", "Represents a string value."
+ "bool_value", ":ref:`ref_bool`", "", "Represents a boolean value."
+ "struct_value", ":ref:`ref_google.protobuf.Struct`", "", "Represents a structured value."
+ "list_value", ":ref:`ref_google.protobuf.ListValue`", "", "Represents a repeated `Value`."
+
+
+
+
+
+
+
+
+
+.. _ref_google.protobuf.NullValue:
+
+NullValue
+------------------------------------------------------------------
+
+`NullValue` is a singleton enumeration to represent the null value for the
+`Value` type union.
+
+ The JSON representation for `NullValue` is JSON `null`.
+
+.. csv-table:: Enum NullValue values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "NULL_VALUE", "0", "Null value."
+
+
+
+
+
+
+
+
+
+.. _ref_scala_types:
+
+Scalar Value Types
+==================
+
+
+
+.. _ref_double:
+
+double
+-----------------------------
+
+
+
+.. csv-table:: double language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "double", "double", "double", "float", "float64", "double", "float", "Float"
+
+
+
+.. _ref_float:
+
+float
+-----------------------------
+
+
+
+.. csv-table:: float language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "float", "float", "float", "float", "float32", "float", "float", "Float"
+
+
+
+.. _ref_int32:
+
+int32
+-----------------------------
+
+Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint32 instead.
+
+.. csv-table:: int32 language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "int32", "int32", "int", "int", "int32", "int", "integer", "Bignum or Fixnum (as required)"
+
+
+
+.. _ref_int64:
+
+int64
+-----------------------------
+
+Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint64 instead.
+
+.. csv-table:: int64 language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "int64", "int64", "long", "int/long", "int64", "long", "integer/string", "Bignum"
+
+
+
+.. _ref_uint32:
+
+uint32
+-----------------------------
+
+Uses variable-length encoding.
+
+.. csv-table:: uint32 language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "uint32", "uint32", "int", "int/long", "uint32", "uint", "integer", "Bignum or Fixnum (as required)"
+
+
+
+.. _ref_uint64:
+
+uint64
+-----------------------------
+
+Uses variable-length encoding.
+
+.. csv-table:: uint64 language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "uint64", "uint64", "long", "int/long", "uint64", "ulong", "integer/string", "Bignum or Fixnum (as required)"
+
+
+
+.. _ref_sint32:
+
+sint32
+-----------------------------
+
+Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int32s.
+
+.. csv-table:: sint32 language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "sint32", "int32", "int", "int", "int32", "int", "integer", "Bignum or Fixnum (as required)"
+
+
+
+.. _ref_sint64:
+
+sint64
+-----------------------------
+
+Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int64s.
+
+.. csv-table:: sint64 language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "sint64", "int64", "long", "int/long", "int64", "long", "integer/string", "Bignum"
+
+
+
+.. _ref_fixed32:
+
+fixed32
+-----------------------------
+
+Always four bytes. More efficient than uint32 if values are often greater than 2^28.
+
+.. csv-table:: fixed32 language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "fixed32", "uint32", "int", "int", "uint32", "uint", "integer", "Bignum or Fixnum (as required)"
+
+
+
+.. _ref_fixed64:
+
+fixed64
+-----------------------------
+
+Always eight bytes. More efficient than uint64 if values are often greater than 2^56.
+
+.. csv-table:: fixed64 language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "fixed64", "uint64", "long", "int/long", "uint64", "ulong", "integer/string", "Bignum"
+
+
+
+.. _ref_sfixed32:
+
+sfixed32
+-----------------------------
+
+Always four bytes.
+
+.. csv-table:: sfixed32 language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "sfixed32", "int32", "int", "int", "int32", "int", "integer", "Bignum or Fixnum (as required)"
+
+
+
+.. _ref_sfixed64:
+
+sfixed64
+-----------------------------
+
+Always eight bytes.
+
+.. csv-table:: sfixed64 language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "sfixed64", "int64", "long", "int/long", "int64", "long", "integer/string", "Bignum"
+
+
+
+.. _ref_bool:
+
+bool
+-----------------------------
+
+
+
+.. csv-table:: bool language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "bool", "bool", "boolean", "boolean", "bool", "bool", "boolean", "TrueClass/FalseClass"
+
+
+
+.. _ref_string:
+
+string
+-----------------------------
+
+A string must always contain UTF-8 encoded or 7-bit ASCII text.
+
+.. csv-table:: string language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "string", "string", "String", "str/unicode", "string", "string", "string", "String (UTF-8)"
+
+
+
+.. _ref_bytes:
+
+bytes
+-----------------------------
+
+May contain any arbitrary sequence of bytes.
+
+.. csv-table:: bytes language representation
+ :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby"
+ :widths: auto
+
+ "bytes", "string", "ByteString", "str", "[]byte", "ByteString", "string", "String (ASCII-8BIT)"
+
+
\ No newline at end of file
diff --git a/flyteidl/protos/docs/datacatalog/datacatalog.rst b/flyteidl/protos/docs/datacatalog/datacatalog.rst
new file mode 100644
index 00000000000..6a2477cf19b
--- /dev/null
+++ b/flyteidl/protos/docs/datacatalog/datacatalog.rst
@@ -0,0 +1,1237 @@
+######################
+Protocol Documentation
+######################
+
+
+
+
+.. _ref_flyteidl/datacatalog/datacatalog.proto:
+
+flyteidl/datacatalog/datacatalog.proto
+==================================================================
+
+
+
+
+
+.. _ref_datacatalog.AddTagRequest:
+
+AddTagRequest
+------------------------------------------------------------------
+
+Request message for tagging an Artifact.
+
+
+
+.. csv-table:: AddTagRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "tag", ":ref:`ref_datacatalog.Tag`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.AddTagResponse:
+
+AddTagResponse
+------------------------------------------------------------------
+
+Response message for tagging an Artifact.
+
+
+
+
+
+
+
+
+.. _ref_datacatalog.Artifact:
+
+Artifact
+------------------------------------------------------------------
+
+Artifact message. It is composed of several string fields.
+
+
+
+.. csv-table:: Artifact type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_string`", "", "The unique ID of the artifact"
+ "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "The Dataset that the artifact belongs to"
+ "data", ":ref:`ref_datacatalog.ArtifactData`", "repeated", "A list of data that is associated with the artifact"
+ "metadata", ":ref:`ref_datacatalog.Metadata`", "", "Free-form metadata associated with the artifact"
+ "partitions", ":ref:`ref_datacatalog.Partition`", "repeated", ""
+ "tags", ":ref:`ref_datacatalog.Tag`", "repeated", ""
+ "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "creation timestamp of artifact, autogenerated by service"
+
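+A rough sketch of assembling an Artifact with one piece of data and one partition
+(assuming the generated Python bindings `flyteidl.datacatalog.datacatalog_pb2` and
+`flyteidl.core.literals_pb2`; identifiers and values are illustrative):
+
+    from flyteidl.core import literals_pb2
+    from flyteidl.datacatalog import datacatalog_pb2
+
+    dataset_id = datacatalog_pb2.DatasetID(
+        project="flytesnacks",
+        domain="development",
+        name="my_task_outputs",
+        version="1.0",
+    )
+
+    artifact = datacatalog_pb2.Artifact(
+        id="0b2b1f39-0000-0000-0000-000000000000",   # illustrative UUID
+        dataset=dataset_id,
+        data=[
+            datacatalog_pb2.ArtifactData(
+                name="out",
+                value=literals_pb2.Literal(
+                    scalar=literals_pb2.Scalar(
+                        primitive=literals_pb2.Primitive(integer=42)
+                    )
+                ),
+            )
+        ],
+        partitions=[datacatalog_pb2.Partition(key="ds", value="2022-01-01")],
+    )
+
+    create_request = datacatalog_pb2.CreateArtifactRequest(artifact=artifact)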
+
+
+
+
+
+
+.. _ref_datacatalog.ArtifactData:
+
+ArtifactData
+------------------------------------------------------------------
+
+ArtifactData that belongs to an artifact
+
+
+
+.. csv-table:: ArtifactData type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "name", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_flyteidl.core.Literal`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.ArtifactPropertyFilter:
+
+ArtifactPropertyFilter
+------------------------------------------------------------------
+
+Artifact properties we can filter by
+
+
+
+.. csv-table:: ArtifactPropertyFilter type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "artifact_id", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.CreateArtifactRequest:
+
+CreateArtifactRequest
+------------------------------------------------------------------
+
+Request message for creating an Artifact and its associated artifact Data.
+
+
+
+.. csv-table:: CreateArtifactRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "artifact", ":ref:`ref_datacatalog.Artifact`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.CreateArtifactResponse:
+
+CreateArtifactResponse
+------------------------------------------------------------------
+
+Response message for creating an Artifact.
+
+
+
+
+
+
+
+
+.. _ref_datacatalog.CreateDatasetRequest:
+
+CreateDatasetRequest
+------------------------------------------------------------------
+
+Request message for creating a Dataset.
+
+
+
+.. csv-table:: CreateDatasetRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "dataset", ":ref:`ref_datacatalog.Dataset`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.CreateDatasetResponse:
+
+CreateDatasetResponse
+------------------------------------------------------------------
+
+Response message for creating a Dataset
+
+
+
+
+
+
+
+
+.. _ref_datacatalog.Dataset:
+
+Dataset
+------------------------------------------------------------------
+
+Dataset message. It is uniquely identified by DatasetID.
+
+
+
+.. csv-table:: Dataset type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_datacatalog.DatasetID`", "", ""
+ "metadata", ":ref:`ref_datacatalog.Metadata`", "", ""
+ "partitionKeys", ":ref:`ref_string`", "repeated", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.DatasetID:
+
+DatasetID
+------------------------------------------------------------------
+
+DatasetID message that is composed of several string fields.
+
+
+
+.. csv-table:: DatasetID type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", "The name of the project"
+ "name", ":ref:`ref_string`", "", "The name of the dataset"
+ "domain", ":ref:`ref_string`", "", "The domain (eg. environment)"
+ "version", ":ref:`ref_string`", "", "Version of the data schema"
+ "UUID", ":ref:`ref_string`", "", "UUID for the dataset (if set the above fields are optional)"
+
+
+
+
+
+
+
+.. _ref_datacatalog.DatasetPropertyFilter:
+
+DatasetPropertyFilter
+------------------------------------------------------------------
+
+Dataset properties we can filter by
+
+
+
+.. csv-table:: DatasetPropertyFilter type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", ""
+ "name", ":ref:`ref_string`", "", ""
+ "domain", ":ref:`ref_string`", "", ""
+ "version", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.FilterExpression:
+
+FilterExpression
+------------------------------------------------------------------
+
+Filter expression that is composed of a combination of single filters
+
+
+
+.. csv-table:: FilterExpression type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "filters", ":ref:`ref_datacatalog.SinglePropertyFilter`", "repeated", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.GetArtifactRequest:
+
+GetArtifactRequest
+------------------------------------------------------------------
+
+Request message for retrieving an Artifact. Retrieve an artifact based on a query handle that
+can be one of artifact_id or tag. The result returned will include the artifact data and metadata
+associated with the artifact.
+
+
+
+.. csv-table:: GetArtifactRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "dataset", ":ref:`ref_datacatalog.DatasetID`", "", ""
+ "artifact_id", ":ref:`ref_string`", "", ""
+ "tag_name", ":ref:`ref_string`", "", ""
+
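+For instance, fetching whatever artifact currently carries the "latest" tag in a
+dataset could be sketched as follows (reusing the `dataset_id` from the Artifact sketch
+above; the tag name is illustrative):
+
+    from flyteidl.datacatalog import datacatalog_pb2
+
+    # Query by tag; alternatively set `artifact_id` instead of `tag_name`.
+    request = datacatalog_pb2.GetArtifactRequest(
+        dataset=dataset_id,
+        tag_name="latest",
+    )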
+
+
+
+
+
+
+.. _ref_datacatalog.GetArtifactResponse:
+
+GetArtifactResponse
+------------------------------------------------------------------
+
+Response message for retrieving an Artifact. The result returned will include the artifact data
+and metadata associated with the artifact.
+
+
+
+.. csv-table:: GetArtifactResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "artifact", ":ref:`ref_datacatalog.Artifact`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.GetDatasetRequest:
+
+GetDatasetRequest
+------------------------------------------------------------------
+
+Request message for retrieving a Dataset. The Dataset is retrieved by its unique identifier
+which is a combination of several fields.
+
+
+
+.. csv-table:: GetDatasetRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "dataset", ":ref:`ref_datacatalog.DatasetID`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.GetDatasetResponse:
+
+GetDatasetResponse
+------------------------------------------------------------------
+
+Response message for retrieving a Dataset. The response will include the metadata for the
+Dataset.
+
+
+
+.. csv-table:: GetDatasetResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "dataset", ":ref:`ref_datacatalog.Dataset`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.GetOrExtendReservationRequest:
+
+GetOrExtendReservationRequest
+------------------------------------------------------------------
+
+Try to acquire or extend an artifact reservation. If an active reservation exists, retrieve that instance.
+
+
+
+.. csv-table:: GetOrExtendReservationRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "reservation_id", ":ref:`ref_datacatalog.ReservationID`", "", ""
+ "owner_id", ":ref:`ref_string`", "", ""
+ "heartbeat_interval", ":ref:`ref_google.protobuf.Duration`", "", "Requested reservation extension heartbeat interval"
+
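+A sketch of acquiring (or extending) a reservation on the "latest" tag of a dataset,
+with a 30-second heartbeat (the owner id is illustrative, and `dataset_id` is the
+DatasetID from the Artifact sketch above):
+
+    from google.protobuf import duration_pb2
+    from flyteidl.datacatalog import datacatalog_pb2
+
+    request = datacatalog_pb2.GetOrExtendReservationRequest(
+        reservation_id=datacatalog_pb2.ReservationID(
+            dataset_id=dataset_id,
+            tag_name="latest",
+        ),
+        owner_id="executor-pod-abc123",
+        heartbeat_interval=duration_pb2.Duration(seconds=30),
+    )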
+
+
+
+
+
+
+.. _ref_datacatalog.GetOrExtendReservationResponse:
+
+GetOrExtendReservationResponse
+------------------------------------------------------------------
+
+Response including either a newly minted reservation or the existing reservation
+
+
+
+.. csv-table:: GetOrExtendReservationResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "reservation", ":ref:`ref_datacatalog.Reservation`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.KeyValuePair:
+
+KeyValuePair
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: KeyValuePair type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.ListArtifactsRequest:
+
+ListArtifactsRequest
+------------------------------------------------------------------
+
+List the artifacts that belong to the Dataset, optionally filtered using a filter expression.
+
+
+
+.. csv-table:: ListArtifactsRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "Use a datasetID for which you want to retrieve the artifacts"
+ "filter", ":ref:`ref_datacatalog.FilterExpression`", "", "Apply the filter expression to this query"
+ "pagination", ":ref:`ref_datacatalog.PaginationOptions`", "", "Pagination options to get a page of artifacts"
+
+
+
+
+
+
+
+.. _ref_datacatalog.ListArtifactsResponse:
+
+ListArtifactsResponse
+------------------------------------------------------------------
+
+Response to list artifacts
+
+
+
+.. csv-table:: ListArtifactsResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "artifacts", ":ref:`ref_datacatalog.Artifact`", "repeated", "The list of artifacts"
+ "next_token", ":ref:`ref_string`", "", "Token to use to request the next page, pass this into the next requests PaginationOptions"
+
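+A sketch of paging through all artifacts in a dataset by threading `next_token` back
+into the next request's PaginationOptions (the endpoint and the generated gRPC stub name
+`DataCatalogStub` are assumptions for illustration):
+
+    import grpc
+    from flyteidl.datacatalog import datacatalog_pb2, datacatalog_pb2_grpc
+
+    channel = grpc.insecure_channel("localhost:8089")   # illustrative endpoint
+    stub = datacatalog_pb2_grpc.DataCatalogStub(channel)
+
+    token = ""
+    while True:
+        response = stub.ListArtifacts(
+            datacatalog_pb2.ListArtifactsRequest(
+                dataset=dataset_id,                      # DatasetID from the Artifact sketch
+                pagination=datacatalog_pb2.PaginationOptions(limit=50, token=token),
+            )
+        )
+        for artifact in response.artifacts:
+            print(artifact.id)
+        if not response.next_token:
+            break
+        token = response.next_token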
+
+
+
+
+
+
+.. _ref_datacatalog.ListDatasetsRequest:
+
+ListDatasetsRequest
+------------------------------------------------------------------
+
+List the datasets for the given query
+
+
+
+.. csv-table:: ListDatasetsRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "filter", ":ref:`ref_datacatalog.FilterExpression`", "", "Apply the filter expression to this query"
+ "pagination", ":ref:`ref_datacatalog.PaginationOptions`", "", "Pagination options to get a page of datasets"
+
+
+
+
+
+
+
+.. _ref_datacatalog.ListDatasetsResponse:
+
+ListDatasetsResponse
+------------------------------------------------------------------
+
+Response for listing datasets, with a token for requesting the next page
+
+
+
+.. csv-table:: ListDatasetsResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "datasets", ":ref:`ref_datacatalog.Dataset`", "repeated", "The list of datasets"
+ "next_token", ":ref:`ref_string`", "", "Token to use to request the next page, pass this into the next requests PaginationOptions"
+
+
+
+
+
+
+
+.. _ref_datacatalog.Metadata:
+
+Metadata
+------------------------------------------------------------------
+
+Metadata representation for artifacts and datasets
+
+
+
+.. csv-table:: Metadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key_map", ":ref:`ref_datacatalog.Metadata.KeyMapEntry`", "repeated", "key map is a dictionary of key/val strings that represent metadata"
+
+
+
+
+
+
+
+.. _ref_datacatalog.Metadata.KeyMapEntry:
+
+Metadata.KeyMapEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: Metadata.KeyMapEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.PaginationOptions:
+
+PaginationOptions
+------------------------------------------------------------------
+
+Pagination options for making list requests
+
+
+
+.. csv-table:: PaginationOptions type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "limit", ":ref:`ref_uint32`", "", "the max number of results to return"
+ "token", ":ref:`ref_string`", "", "the token to pass to fetch the next page"
+ "sortKey", ":ref:`ref_datacatalog.PaginationOptions.SortKey`", "", "the property that we want to sort the results by"
+ "sortOrder", ":ref:`ref_datacatalog.PaginationOptions.SortOrder`", "", "the sort order of the results"
+
+
+
+
+
+
+
+.. _ref_datacatalog.Partition:
+
+Partition
+------------------------------------------------------------------
+
+An artifact could have multiple partitions and each partition can have an arbitrary string key/value pair
+
+
+
+.. csv-table:: Partition type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.PartitionPropertyFilter:
+
+PartitionPropertyFilter
+------------------------------------------------------------------
+
+Partition properties we can filter by
+
+
+
+.. csv-table:: PartitionPropertyFilter type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key_val", ":ref:`ref_datacatalog.KeyValuePair`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.ReleaseReservationRequest:
+
+ReleaseReservationRequest
+------------------------------------------------------------------
+
+Request to release reservation
+
+
+
+.. csv-table:: ReleaseReservationRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "reservation_id", ":ref:`ref_datacatalog.ReservationID`", "", ""
+ "owner_id", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.ReleaseReservationResponse:
+
+ReleaseReservationResponse
+------------------------------------------------------------------
+
+Response to a release reservation request
+
+
+
+
+
+
+
+
+.. _ref_datacatalog.Reservation:
+
+Reservation
+------------------------------------------------------------------
+
+A reservation including owner, heartbeat interval, expiration timestamp, and various metadata.
+
+
+
+.. csv-table:: Reservation type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "reservation_id", ":ref:`ref_datacatalog.ReservationID`", "", ""
+ "owner_id", ":ref:`ref_string`", "", ""
+ "heartbeat_interval", ":ref:`ref_google.protobuf.Duration`", "", "Recommended heartbeat interval to extend reservation"
+ "expires_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Expiration timestamp of this reservation"
+ "metadata", ":ref:`ref_datacatalog.Metadata`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.ReservationID:
+
+ReservationID
+------------------------------------------------------------------
+
+ReservationID identifies a reservation by the dataset it applies to and a tag name.
+
+
+
+.. csv-table:: ReservationID type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "dataset_id", ":ref:`ref_datacatalog.DatasetID`", "", ""
+ "tag_name", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.SinglePropertyFilter:
+
+SinglePropertyFilter
+------------------------------------------------------------------
+
+A single property to filter on.
+
+
+
+.. csv-table:: SinglePropertyFilter type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "tag_filter", ":ref:`ref_datacatalog.TagPropertyFilter`", "", ""
+ "partition_filter", ":ref:`ref_datacatalog.PartitionPropertyFilter`", "", ""
+ "artifact_filter", ":ref:`ref_datacatalog.ArtifactPropertyFilter`", "", ""
+ "dataset_filter", ":ref:`ref_datacatalog.DatasetPropertyFilter`", "", ""
+ "operator", ":ref:`ref_datacatalog.SinglePropertyFilter.ComparisonOperator`", "", "field 10 in case we add more entities to query"
+
+
+
+
+
+
+
+.. _ref_datacatalog.Tag:
+
+Tag
+------------------------------------------------------------------
+
+Tag message that is unique within a Dataset. It is associated with a single artifact and
+can be retrieved by name later.
+
+
+
+.. csv-table:: Tag type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "name", ":ref:`ref_string`", "", "Name of tag"
+ "artifact_id", ":ref:`ref_string`", "", "The tagged artifact"
+ "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "The Dataset that this tag belongs to"
+
+
+
+
+
+
+
+.. _ref_datacatalog.TagPropertyFilter:
+
+TagPropertyFilter
+------------------------------------------------------------------
+
+Tag properties we can filter by
+
+
+
+.. csv-table:: TagPropertyFilter type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "tag_name", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+
+
+.. _ref_datacatalog.PaginationOptions.SortKey:
+
+PaginationOptions.SortKey
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum PaginationOptions.SortKey values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "CREATION_TIME", "0", ""
+
+
+
+.. _ref_datacatalog.PaginationOptions.SortOrder:
+
+PaginationOptions.SortOrder
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum PaginationOptions.SortOrder values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "DESCENDING", "0", ""
+ "ASCENDING", "1", ""
+
+
+
+.. _ref_datacatalog.SinglePropertyFilter.ComparisonOperator:
+
+SinglePropertyFilter.ComparisonOperator
+------------------------------------------------------------------
+
+As use cases come up, more operators can be added, e.g. gte, like, not eq, etc.
+
+.. csv-table:: Enum SinglePropertyFilter.ComparisonOperator values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "EQUALS", "0", ""
+
+
+
+
+
+
+
+.. _ref_datacatalog.DataCatalog:
+
+DataCatalog
+------------------------------------------------------------------
+
+Data Catalog service definition
+Data Catalog is a service for indexing parameterized, strongly-typed data artifacts across revisions.
+Artifacts are associated with a Dataset, and can be tagged for retrieval.
+
+.. csv-table:: DataCatalog service methods
+ :header: "Method Name", "Request Type", "Response Type", "Description"
+ :widths: auto
+
+ "CreateDataset", ":ref:`ref_datacatalog.CreateDatasetRequest`", ":ref:`ref_datacatalog.CreateDatasetResponse`", "Create a new Dataset. Datasets are unique based on the DatasetID. Datasets are logical groupings of artifacts. Each dataset can have one or more artifacts"
+ "GetDataset", ":ref:`ref_datacatalog.GetDatasetRequest`", ":ref:`ref_datacatalog.GetDatasetResponse`", "Get a Dataset by the DatasetID. This returns the Dataset with the associated metadata."
+ "CreateArtifact", ":ref:`ref_datacatalog.CreateArtifactRequest`", ":ref:`ref_datacatalog.CreateArtifactResponse`", "Create an artifact and the artifact data associated with it. An artifact can be a hive partition or arbitrary files or data values"
+ "GetArtifact", ":ref:`ref_datacatalog.GetArtifactRequest`", ":ref:`ref_datacatalog.GetArtifactResponse`", "Retrieve an artifact by an identifying handle. This returns an artifact along with the artifact data."
+ "AddTag", ":ref:`ref_datacatalog.AddTagRequest`", ":ref:`ref_datacatalog.AddTagResponse`", "Associate a tag with an artifact. Tags are unique within a Dataset."
+ "ListArtifacts", ":ref:`ref_datacatalog.ListArtifactsRequest`", ":ref:`ref_datacatalog.ListArtifactsResponse`", "Return a paginated list of artifacts"
+ "ListDatasets", ":ref:`ref_datacatalog.ListDatasetsRequest`", ":ref:`ref_datacatalog.ListDatasetsResponse`", "Return a paginated list of datasets"
+ "GetOrExtendReservation", ":ref:`ref_datacatalog.GetOrExtendReservationRequest`", ":ref:`ref_datacatalog.GetOrExtendReservationResponse`", "Attempts to get or extend a reservation for the corresponding artifact. If one already exists (ie. another entity owns the reservation) then that reservation is retrieved. Once you acquire a reservation, you need to periodically extend the reservation with an identical call. If the reservation is not extended before the defined expiration, it may be acquired by another task. Note: We may have multiple concurrent tasks with the same signature and the same input that try to populate the same artifact at the same time. Thus with reservation, only one task can run at a time, until the reservation expires. Note: If task A does not extend the reservation in time and the reservation expires, another task B may take over the reservation, resulting in two tasks A and B running in parallel. So a third task C may get the Artifact from A or B, whichever writes last."
+ "ReleaseReservation", ":ref:`ref_datacatalog.ReleaseReservationRequest`", ":ref:`ref_datacatalog.ReleaseReservationResponse`", "Release the reservation when the task holding the spot fails so that the other tasks can grab the spot."
+
+
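+A rough, illustrative sketch of the reservation flow described above for
+``GetOrExtendReservation`` and ``ReleaseReservation``. The Python module path, the connected
+``stub``, and the exact request fields are assumptions, not verified against this spec::
+
+    from google.protobuf.duration_pb2 import Duration
+    from flyteidl.datacatalog import datacatalog_pb2 as dc
+
+    def try_acquire(stub, dataset_id, tag_name, owner_id):
+        # The same call both acquires a new reservation and extends one we already hold.
+        request = dc.GetOrExtendReservationRequest(
+            reservation_id=dc.ReservationID(dataset_id=dataset_id, tag_name=tag_name),
+            owner_id=owner_id,
+            heartbeat_interval=Duration(seconds=30),
+        )
+        reservation = stub.GetOrExtendReservation(request).reservation
+        # If another entity owns the reservation, its record is returned instead.
+        return reservation.owner_id == owner_id
+
+    def release(stub, dataset_id, tag_name, owner_id):
+        # Release explicitly on failure so other tasks can grab the spot sooner.
+        stub.ReleaseReservation(dc.ReleaseReservationRequest(
+            reservation_id=dc.ReservationID(dataset_id=dataset_id, tag_name=tag_name),
+            owner_id=owner_id,
+        ))
+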
+
+
+
+.. _ref_google/protobuf/timestamp.proto:
+
+google/protobuf/timestamp.proto
+==================================================================
+
+
+
+
+
+.. _ref_google.protobuf.Timestamp:
+
+Timestamp
+------------------------------------------------------------------
+
+A Timestamp represents a point in time independent of any time zone or local
+calendar, encoded as a count of seconds and fractions of seconds at
+nanosecond resolution. The count is relative to an epoch at UTC midnight on
+January 1, 1970, in the proleptic Gregorian calendar which extends the
+Gregorian calendar backwards to year one.
+
+All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
+second table is needed for interpretation, using a [24-hour linear
+smear](https://developers.google.com/time/smear).
+
+The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
+restricting to that range, we ensure that we can convert to and from [RFC
+3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
+
+# Examples
+
+Example 1: Compute Timestamp from POSIX `time()`.
+
+ Timestamp timestamp;
+ timestamp.set_seconds(time(NULL));
+ timestamp.set_nanos(0);
+
+Example 2: Compute Timestamp from POSIX `gettimeofday()`.
+
+ struct timeval tv;
+ gettimeofday(&tv, NULL);
+
+ Timestamp timestamp;
+ timestamp.set_seconds(tv.tv_sec);
+ timestamp.set_nanos(tv.tv_usec * 1000);
+
+Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
+
+ FILETIME ft;
+ GetSystemTimeAsFileTime(&ft);
+ UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
+
+ // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
+ // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
+ Timestamp timestamp;
+ timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
+ timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
+
+Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
+
+ long millis = System.currentTimeMillis();
+
+ Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
+ .setNanos((int) ((millis % 1000) * 1000000)).build();
+
+
+Example 5: Compute Timestamp from Java `Instant.now()`.
+
+ Instant now = Instant.now();
+
+ Timestamp timestamp =
+ Timestamp.newBuilder().setSeconds(now.getEpochSecond())
+ .setNanos(now.getNano()).build();
+
+
+Example 6: Compute Timestamp from current time in Python.
+
+ timestamp = Timestamp()
+ timestamp.GetCurrentTime()
+
+# JSON Mapping
+
+In JSON format, the Timestamp type is encoded as a string in the
+[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
+format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
+where {year} is always expressed using four digits while {month}, {day},
+{hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
+seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
+are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
+is required. A proto3 JSON serializer should always use UTC (as indicated by
+"Z") when printing the Timestamp type and a proto3 JSON parser should be
+able to accept both UTC and other timezones (as indicated by an offset).
+
+For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
+01:30 UTC on January 15, 2017.
+
+In JavaScript, one can convert a Date object to this format using the
+standard
+[toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
+method. In Python, a standard `datetime.datetime` object can be converted
+to this format using
+[`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
+the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
+the Joda Time's [`ISODateTimeFormat.dateTime()`](
+http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
+) to obtain a formatter capable of generating timestamps in this format.
+
+
+
+.. csv-table:: Timestamp type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "seconds", ":ref:`ref_int64`", "", "Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."
+ "nanos", ":ref:`ref_int32`", "", "Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive."
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_google/protobuf/duration.proto:
+
+google/protobuf/duration.proto
+==================================================================
+
+
+
+
+
+.. _ref_google.protobuf.Duration:
+
+Duration
+------------------------------------------------------------------
+
+A Duration represents a signed, fixed-length span of time represented
+as a count of seconds and fractions of seconds at nanosecond
+resolution. It is independent of any calendar and concepts like "day"
+or "month". It is related to Timestamp in that the difference between
+two Timestamp values is a Duration and it can be added or subtracted
+from a Timestamp. Range is approximately +-10,000 years.
+
+# Examples
+
+Example 1: Compute Duration from two Timestamps in pseudo code.
+
+ Timestamp start = ...;
+ Timestamp end = ...;
+ Duration duration = ...;
+
+ duration.seconds = end.seconds - start.seconds;
+ duration.nanos = end.nanos - start.nanos;
+
+ if (duration.seconds < 0 && duration.nanos > 0) {
+ duration.seconds += 1;
+ duration.nanos -= 1000000000;
+ } else if (duration.seconds > 0 && duration.nanos < 0) {
+ duration.seconds -= 1;
+ duration.nanos += 1000000000;
+ }
+
+Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+
+ Timestamp start = ...;
+ Duration duration = ...;
+ Timestamp end = ...;
+
+ end.seconds = start.seconds + duration.seconds;
+ end.nanos = start.nanos + duration.nanos;
+
+ if (end.nanos < 0) {
+ end.seconds -= 1;
+ end.nanos += 1000000000;
+ } else if (end.nanos >= 1000000000) {
+ end.seconds += 1;
+ end.nanos -= 1000000000;
+ }
+
+Example 3: Compute Duration from datetime.timedelta in Python.
+
+ td = datetime.timedelta(days=3, minutes=10)
+ duration = Duration()
+ duration.FromTimedelta(td)
+
+# JSON Mapping
+
+In JSON format, the Duration type is encoded as a string rather than an
+object, where the string ends in the suffix "s" (indicating seconds) and
+is preceded by the number of seconds, with nanoseconds expressed as
+fractional seconds. For example, 3 seconds with 0 nanoseconds should be
+encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
+be expressed in JSON format as "3.000000001s", and 3 seconds and 1
+microsecond should be expressed in JSON format as "3.000001s".
+
+
+
+.. csv-table:: Duration type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years"
+ "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive."
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_google/protobuf/struct.proto:
+
+google/protobuf/struct.proto
+==================================================================
+
+
+
+
+
+.. _ref_google.protobuf.ListValue:
+
+ListValue
+------------------------------------------------------------------
+
+`ListValue` is a wrapper around a repeated field of values.
+
+The JSON representation for `ListValue` is JSON array.
+
+
+
+.. csv-table:: ListValue type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "values", ":ref:`ref_google.protobuf.Value`", "repeated", "Repeated field of dynamically typed values."
+
+
+
+
+
+
+
+.. _ref_google.protobuf.Struct:
+
+Struct
+------------------------------------------------------------------
+
+`Struct` represents a structured data value, consisting of fields
+which map to dynamically typed values. In some languages, `Struct`
+might be supported by a native representation. For example, in
+scripting languages like JS a struct is represented as an
+object. The details of that representation are described together
+with the proto support for the language.
+
+The JSON representation for `Struct` is JSON object.
+
+
+
+.. csv-table:: Struct type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "fields", ":ref:`ref_google.protobuf.Struct.FieldsEntry`", "repeated", "Unordered map of dynamically typed values."
+
+
+
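+A short Python sketch of building a ``Struct`` from a plain dictionary and printing its JSON
+object form (the dictionary contents are arbitrary)::
+
+    from google.protobuf.struct_pb2 import Struct
+    from google.protobuf.json_format import MessageToJson
+
+    s = Struct()
+    # update() accepts a plain dict and wraps each entry in a Value message.
+    s.update({"name": "example", "count": 3, "nested": {"flag": True}})
+    print(MessageToJson(s))  # prints a JSON object, per the mapping described above
+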
+
+
+
+
+.. _ref_google.protobuf.Struct.FieldsEntry:
+
+Struct.FieldsEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: Struct.FieldsEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_google.protobuf.Value`", "", ""
+
+
+
+
+
+
+
+.. _ref_google.protobuf.Value:
+
+Value
+------------------------------------------------------------------
+
+`Value` represents a dynamically typed value which can be either
+null, a number, a string, a boolean, a recursive struct value, or a
+list of values. A producer of value is expected to set one of these
+variants. Absence of any variant indicates an error.
+
+The JSON representation for `Value` is JSON value.
+
+
+
+.. csv-table:: Value type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "null_value", ":ref:`ref_google.protobuf.NullValue`", "", "Represents a null value."
+ "number_value", ":ref:`ref_double`", "", "Represents a double value."
+ "string_value", ":ref:`ref_string`", "", "Represents a string value."
+ "bool_value", ":ref:`ref_bool`", "", "Represents a boolean value."
+ "struct_value", ":ref:`ref_google.protobuf.Struct`", "", "Represents a structured value."
+ "list_value", ":ref:`ref_google.protobuf.ListValue`", "", "Represents a repeated `Value`."
+
+
+
+
+
+
+
+
+
+.. _ref_google.protobuf.NullValue:
+
+NullValue
+------------------------------------------------------------------
+
+`NullValue` is a singleton enumeration to represent the null value for the
+`Value` type union.
+
+ The JSON representation for `NullValue` is JSON `null`.
+
+.. csv-table:: Enum NullValue values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "NULL_VALUE", "0", "Null value."
+
+
+
+
+
+
+
+
diff --git a/flyteidl/protos/docs/event/event.rst b/flyteidl/protos/docs/event/event.rst
new file mode 100644
index 00000000000..b1bcc69ca37
--- /dev/null
+++ b/flyteidl/protos/docs/event/event.rst
@@ -0,0 +1,693 @@
+######################
+Protocol Documentation
+######################
+
+
+
+
+.. _ref_flyteidl/event/event.proto:
+
+flyteidl/event/event.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.event.DynamicWorkflowNodeMetadata:
+
+DynamicWorkflowNodeMetadata
+------------------------------------------------------------------
+
+For dynamic workflow nodes we send information about the dynamic workflow definition that gets generated.
+
+
+
+.. csv-table:: DynamicWorkflowNodeMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow."
+ "compiled_workflow", ":ref:`ref_flyteidl.core.CompiledWorkflowClosure`", "", "Represents the compiled representation of the embedded dynamic workflow."
+
+
+
+
+
+
+
+.. _ref_flyteidl.event.ExternalResourceInfo:
+
+ExternalResourceInfo
+------------------------------------------------------------------
+
+This message contains metadata about external resources produced or used by a specific task execution.
+
+
+
+.. csv-table:: ExternalResourceInfo type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "external_id", ":ref:`ref_string`", "", "Identifier for an external resource created by this task execution, for example Qubole query ID or presto query ids."
+ "index", ":ref:`ref_uint32`", "", "A unique index for the external resource with respect to all external resources for this task. Although the identifier may change between task reporting events or retries, this will remain the same to enable aggregating information from multiple reports."
+ "retry_attempt", ":ref:`ref_uint32`", "", "Retry attempt number for this external resource, ie., 2 for the second attempt"
+ "phase", ":ref:`ref_flyteidl.core.TaskExecution.Phase`", "", "Phase associated with the external resource"
+ "cache_status", ":ref:`ref_flyteidl.core.CatalogCacheStatus`", "", "Captures the status of caching for this external resource execution."
+ "logs", ":ref:`ref_flyteidl.core.TaskLog`", "repeated", "log information for the external resource execution"
+
+
+
+
+
+
+
+.. _ref_flyteidl.event.NodeExecutionEvent:
+
+NodeExecutionEvent
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: NodeExecutionEvent type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Unique identifier for this node execution"
+ "producer_id", ":ref:`ref_string`", "", "the id of the originator (Propeller) of the event"
+ "phase", ":ref:`ref_flyteidl.core.NodeExecution.Phase`", "", ""
+ "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the original event occurred, it is generated by the executor of the node."
+ "input_uri", ":ref:`ref_string`", "", ""
+ "output_uri", ":ref:`ref_string`", "", "URL to the output of the execution, it encodes all the information including Cloud source provider. ie., s3://..."
+ "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the execution"
+ "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Raw output data produced by this node execution."
+ "workflow_node_metadata", ":ref:`ref_flyteidl.event.WorkflowNodeMetadata`", "", ""
+ "task_node_metadata", ":ref:`ref_flyteidl.event.TaskNodeMetadata`", "", ""
+ "parent_task_metadata", ":ref:`ref_flyteidl.event.ParentTaskExecutionMetadata`", "", "[To be deprecated] Specifies which task (if any) launched this node."
+ "parent_node_metadata", ":ref:`ref_flyteidl.event.ParentNodeExecutionMetadata`", "", "Specifies the parent node of the current node execution. Node executions at level zero will not have a parent node."
+ "retry_group", ":ref:`ref_string`", "", "Retry group to indicate grouping of nodes by retries"
+ "spec_node_id", ":ref:`ref_string`", "", "Identifier of the node in the original workflow/graph This maps to value of WorkflowTemplate.nodes[X].id"
+ "node_name", ":ref:`ref_string`", "", "Friendly readable name for the node"
+ "event_version", ":ref:`ref_int32`", "", ""
+ "is_parent", ":ref:`ref_bool`", "", "Whether this node launched a subworkflow."
+ "is_dynamic", ":ref:`ref_bool`", "", "Whether this node yielded a dynamic workflow."
+
+
+
+
+
+
+
+.. _ref_flyteidl.event.ParentNodeExecutionMetadata:
+
+ParentNodeExecutionMetadata
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: ParentNodeExecutionMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "node_id", ":ref:`ref_string`", "", "Unique identifier of the parent node id within the execution This is value of core.NodeExecutionIdentifier.node_id of the parent node"
+
+
+
+
+
+
+
+.. _ref_flyteidl.event.ParentTaskExecutionMetadata:
+
+ParentTaskExecutionMetadata
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: ParentTaskExecutionMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.event.ResourcePoolInfo:
+
+ResourcePoolInfo
+------------------------------------------------------------------
+
+This message holds task execution metadata specific to resource allocation used to manage concurrent
+executions for a project namespace.
+
+
+
+.. csv-table:: ResourcePoolInfo type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "allocation_token", ":ref:`ref_string`", "", "Unique resource ID used to identify this execution when allocating a token."
+ "namespace", ":ref:`ref_string`", "", "Namespace under which this task execution requested an allocation token."
+
+
+
+
+
+
+
+.. _ref_flyteidl.event.TaskExecutionEvent:
+
+TaskExecutionEvent
+------------------------------------------------------------------
+
+Plugin-specific execution event information, for tasks like Python, Hive, Spark, and DynamicJob.
+
+
+
+.. csv-table:: TaskExecutionEvent type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "task_id", ":ref:`ref_flyteidl.core.Identifier`", "", "ID of the task. In combination with the retryAttempt this will indicate the task execution uniquely for a given parent node execution."
+ "parent_node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "A task execution is always kicked off by a node execution, the event consumer will use the parent_id to relate the task to it's parent node execution"
+ "retry_attempt", ":ref:`ref_uint32`", "", "retry attempt number for this task, ie., 2 for the second attempt"
+ "phase", ":ref:`ref_flyteidl.core.TaskExecution.Phase`", "", "Phase associated with the event"
+ "producer_id", ":ref:`ref_string`", "", "id of the process that sent this event, mainly for trace debugging"
+ "logs", ":ref:`ref_flyteidl.core.TaskLog`", "repeated", "log information for the task execution"
+ "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the original event occurred, it is generated by the executor of the task."
+ "input_uri", ":ref:`ref_string`", "", "URI of the input file, it encodes all the information including Cloud source provider. ie., s3://..."
+ "output_uri", ":ref:`ref_string`", "", "URI to the output of the execution, it will be in a format that encodes all the information including Cloud source provider. ie., s3://..."
+ "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the execution"
+ "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Raw output data produced by this task execution."
+ "custom_info", ":ref:`ref_google.protobuf.Struct`", "", "Custom data that the task plugin sends back. This is extensible to allow various plugins in the system."
+ "phase_version", ":ref:`ref_uint32`", "", "Some phases, like RUNNING, can send multiple events with changed metadata (new logs, additional custom_info, etc) that should be recorded regardless of the lack of phase change. The version field should be incremented when metadata changes across the duration of an individual phase."
+ "reason", ":ref:`ref_string`", "", "An optional explanation for the phase transition."
+ "task_type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier. If the task definition is already registered in flyte admin this type will be identical, but not all task executions necessarily use pre-registered definitions and this type is useful to render the task in the UI, filter task executions, etc."
+ "metadata", ":ref:`ref_flyteidl.event.TaskExecutionMetadata`", "", "Metadata around how a task was executed."
+
+
+
+
+
+
+
+.. _ref_flyteidl.event.TaskExecutionMetadata:
+
+TaskExecutionMetadata
+------------------------------------------------------------------
+
+Holds metadata around how a task was executed.
+As a task transitions across event phases during execution, some attributes, such as its generated name and
+generated external resources, may grow in size but do not necessarily change with the phase transition that sparked the event update.
+Metadata is a container for these attributes across the task execution lifecycle.
+
+
+
+.. csv-table:: TaskExecutionMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "generated_name", ":ref:`ref_string`", "", "Unique, generated name for this task execution used by the backend."
+ "external_resources", ":ref:`ref_flyteidl.event.ExternalResourceInfo`", "repeated", "Additional data on external resources on other back-ends or platforms (e.g. Hive, Qubole, etc) launched by this task execution."
+ "resource_pool_info", ":ref:`ref_flyteidl.event.ResourcePoolInfo`", "repeated", "Includes additional data on concurrent resource management used during execution.. This is a repeated field because a plugin can request multiple resource allocations during execution."
+ "plugin_identifier", ":ref:`ref_string`", "", "The identifier of the plugin used to execute this task."
+ "instance_class", ":ref:`ref_flyteidl.event.TaskExecutionMetadata.InstanceClass`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.event.TaskNodeMetadata:
+
+TaskNodeMetadata
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: TaskNodeMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "cache_status", ":ref:`ref_flyteidl.core.CatalogCacheStatus`", "", "Captures the status of caching for this execution."
+ "catalog_key", ":ref:`ref_flyteidl.core.CatalogMetadata`", "", "This structure carries the catalog artifact information"
+ "reservation_status", ":ref:`ref_flyteidl.core.CatalogReservation.Status`", "", "Captures the status of cache reservations for this execution."
+ "dynamic_workflow", ":ref:`ref_flyteidl.event.DynamicWorkflowNodeMetadata`", "", "In the case this task launched a dynamic workflow we capture its structure here."
+
+
+
+
+
+
+
+.. _ref_flyteidl.event.WorkflowExecutionEvent:
+
+WorkflowExecutionEvent
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: WorkflowExecutionEvent type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Workflow execution id"
+ "producer_id", ":ref:`ref_string`", "", "the id of the originator (Propeller) of the event"
+ "phase", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "", ""
+ "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the original event occurred, it is generated by the executor of the workflow."
+ "output_uri", ":ref:`ref_string`", "", "URL to the output of the execution, it encodes all the information including Cloud source provider. ie., s3://..."
+ "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the execution"
+ "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Raw output data produced by this workflow execution."
+
+
+
+
+
+
+
+.. _ref_flyteidl.event.WorkflowNodeMetadata:
+
+WorkflowNodeMetadata
+------------------------------------------------------------------
+
+For workflow nodes we send information about the workflow that is launched.
+
+
+
+.. csv-table:: WorkflowNodeMetadata type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", ""
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.event.TaskExecutionMetadata.InstanceClass:
+
+TaskExecutionMetadata.InstanceClass
+------------------------------------------------------------------
+
+Includes the broad category of machine used for this specific task execution.
+
+.. csv-table:: Enum TaskExecutionMetadata.InstanceClass values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "DEFAULT", "0", "The default instance class configured for the flyte application platform."
+ "INTERRUPTIBLE", "1", "The instance class configured for interruptible tasks."
+
+
+
+
+
+
+
+
+
+
+.. _ref_google/protobuf/timestamp.proto:
+
+google/protobuf/timestamp.proto
+==================================================================
+
+
+
+
+
+.. _ref_google.protobuf.Timestamp:
+
+Timestamp
+------------------------------------------------------------------
+
+A Timestamp represents a point in time independent of any time zone or local
+calendar, encoded as a count of seconds and fractions of seconds at
+nanosecond resolution. The count is relative to an epoch at UTC midnight on
+January 1, 1970, in the proleptic Gregorian calendar which extends the
+Gregorian calendar backwards to year one.
+
+All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
+second table is needed for interpretation, using a [24-hour linear
+smear](https://developers.google.com/time/smear).
+
+The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
+restricting to that range, we ensure that we can convert to and from [RFC
+3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
+
+# Examples
+
+Example 1: Compute Timestamp from POSIX `time()`.
+
+ Timestamp timestamp;
+ timestamp.set_seconds(time(NULL));
+ timestamp.set_nanos(0);
+
+Example 2: Compute Timestamp from POSIX `gettimeofday()`.
+
+ struct timeval tv;
+ gettimeofday(&tv, NULL);
+
+ Timestamp timestamp;
+ timestamp.set_seconds(tv.tv_sec);
+ timestamp.set_nanos(tv.tv_usec * 1000);
+
+Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
+
+ FILETIME ft;
+ GetSystemTimeAsFileTime(&ft);
+ UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
+
+ // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
+ // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
+ Timestamp timestamp;
+ timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
+ timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
+
+Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
+
+ long millis = System.currentTimeMillis();
+
+ Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
+ .setNanos((int) ((millis % 1000) * 1000000)).build();
+
+
+Example 5: Compute Timestamp from Java `Instant.now()`.
+
+ Instant now = Instant.now();
+
+ Timestamp timestamp =
+ Timestamp.newBuilder().setSeconds(now.getEpochSecond())
+ .setNanos(now.getNano()).build();
+
+
+Example 6: Compute Timestamp from current time in Python.
+
+ timestamp = Timestamp()
+ timestamp.GetCurrentTime()
+
+# JSON Mapping
+
+In JSON format, the Timestamp type is encoded as a string in the
+[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
+format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
+where {year} is always expressed using four digits while {month}, {day},
+{hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
+seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
+are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
+is required. A proto3 JSON serializer should always use UTC (as indicated by
+"Z") when printing the Timestamp type and a proto3 JSON parser should be
+able to accept both UTC and other timezones (as indicated by an offset).
+
+For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
+01:30 UTC on January 15, 2017.
+
+In JavaScript, one can convert a Date object to this format using the
+standard
+[toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
+method. In Python, a standard `datetime.datetime` object can be converted
+to this format using
+[`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
+the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
+the Joda Time's [`ISODateTimeFormat.dateTime()`](
+http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
+) to obtain a formatter capable of generating timestamps in this format.
+
+
+
+.. csv-table:: Timestamp type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "seconds", ":ref:`ref_int64`", "", "Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."
+ "nanos", ":ref:`ref_int32`", "", "Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive."
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_google/protobuf/duration.proto:
+
+google/protobuf/duration.proto
+==================================================================
+
+
+
+
+
+.. _ref_google.protobuf.Duration:
+
+Duration
+------------------------------------------------------------------
+
+A Duration represents a signed, fixed-length span of time represented
+as a count of seconds and fractions of seconds at nanosecond
+resolution. It is independent of any calendar and concepts like "day"
+or "month". It is related to Timestamp in that the difference between
+two Timestamp values is a Duration and it can be added or subtracted
+from a Timestamp. Range is approximately +-10,000 years.
+
+# Examples
+
+Example 1: Compute Duration from two Timestamps in pseudo code.
+
+ Timestamp start = ...;
+ Timestamp end = ...;
+ Duration duration = ...;
+
+ duration.seconds = end.seconds - start.seconds;
+ duration.nanos = end.nanos - start.nanos;
+
+ if (duration.seconds < 0 && duration.nanos > 0) {
+ duration.seconds += 1;
+ duration.nanos -= 1000000000;
+ } else if (duration.seconds > 0 && duration.nanos < 0) {
+ duration.seconds -= 1;
+ duration.nanos += 1000000000;
+ }
+
+Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+
+ Timestamp start = ...;
+ Duration duration = ...;
+ Timestamp end = ...;
+
+ end.seconds = start.seconds + duration.seconds;
+ end.nanos = start.nanos + duration.nanos;
+
+ if (end.nanos < 0) {
+ end.seconds -= 1;
+ end.nanos += 1000000000;
+ } else if (end.nanos >= 1000000000) {
+ end.seconds += 1;
+ end.nanos -= 1000000000;
+ }
+
+Example 3: Compute Duration from datetime.timedelta in Python.
+
+ td = datetime.timedelta(days=3, minutes=10)
+ duration = Duration()
+ duration.FromTimedelta(td)
+
+# JSON Mapping
+
+In JSON format, the Duration type is encoded as a string rather than an
+object, where the string ends in the suffix "s" (indicating seconds) and
+is preceded by the number of seconds, with nanoseconds expressed as
+fractional seconds. For example, 3 seconds with 0 nanoseconds should be
+encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
+be expressed in JSON format as "3.000000001s", and 3 seconds and 1
+microsecond should be expressed in JSON format as "3.000001s".
+
+
+
+.. csv-table:: Duration type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years"
+ "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive."
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_google/protobuf/struct.proto:
+
+google/protobuf/struct.proto
+==================================================================
+
+
+
+
+
+.. _ref_google.protobuf.ListValue:
+
+ListValue
+------------------------------------------------------------------
+
+`ListValue` is a wrapper around a repeated field of values.
+
+The JSON representation for `ListValue` is JSON array.
+
+
+
+.. csv-table:: ListValue type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "values", ":ref:`ref_google.protobuf.Value`", "repeated", "Repeated field of dynamically typed values."
+
+
+
+
+
+
+
+.. _ref_google.protobuf.Struct:
+
+Struct
+------------------------------------------------------------------
+
+`Struct` represents a structured data value, consisting of fields
+which map to dynamically typed values. In some languages, `Struct`
+might be supported by a native representation. For example, in
+scripting languages like JS a struct is represented as an
+object. The details of that representation are described together
+with the proto support for the language.
+
+The JSON representation for `Struct` is JSON object.
+
+
+
+.. csv-table:: Struct type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "fields", ":ref:`ref_google.protobuf.Struct.FieldsEntry`", "repeated", "Unordered map of dynamically typed values."
+
+
+
+
+
+
+
+.. _ref_google.protobuf.Struct.FieldsEntry:
+
+Struct.FieldsEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: Struct.FieldsEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_google.protobuf.Value`", "", ""
+
+
+
+
+
+
+
+.. _ref_google.protobuf.Value:
+
+Value
+------------------------------------------------------------------
+
+`Value` represents a dynamically typed value which can be either
+null, a number, a string, a boolean, a recursive struct value, or a
+list of values. A producer of value is expected to set one of these
+variants. Absence of any variant indicates an error.
+
+The JSON representation for `Value` is JSON value.
+
+
+
+.. csv-table:: Value type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "null_value", ":ref:`ref_google.protobuf.NullValue`", "", "Represents a null value."
+ "number_value", ":ref:`ref_double`", "", "Represents a double value."
+ "string_value", ":ref:`ref_string`", "", "Represents a string value."
+ "bool_value", ":ref:`ref_bool`", "", "Represents a boolean value."
+ "struct_value", ":ref:`ref_google.protobuf.Struct`", "", "Represents a structured value."
+ "list_value", ":ref:`ref_google.protobuf.ListValue`", "", "Represents a repeated `Value`."
+
+
+
+
+
+
+
+
+
+.. _ref_google.protobuf.NullValue:
+
+NullValue
+------------------------------------------------------------------
+
+`NullValue` is a singleton enumeration to represent the null value for the
+`Value` type union.
+
+ The JSON representation for `NullValue` is JSON `null`.
+
+.. csv-table:: Enum NullValue values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "NULL_VALUE", "0", "Null value."
+
+
+
+
+
+
+
+
diff --git a/flyteidl/protos/docs/plugins/plugins.rst b/flyteidl/protos/docs/plugins/plugins.rst
new file mode 100644
index 00000000000..29f4953625d
--- /dev/null
+++ b/flyteidl/protos/docs/plugins/plugins.rst
@@ -0,0 +1,549 @@
+######################
+Protocol Documentation
+######################
+
+
+
+
+.. _ref_flyteidl/plugins/array_job.proto:
+
+flyteidl/plugins/array_job.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.plugins.ArrayJob:
+
+ArrayJob
+------------------------------------------------------------------
+
+Describes a job that can process independent pieces of data concurrently. Multiple copies of the runnable component
+will be executed concurrently.
+
+
+
+.. csv-table:: ArrayJob type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "parallelism", ":ref:`ref_int64`", "", "Defines the minimum number of instances to bring up concurrently at any given point. Note that this is an optimistic restriction and that, due to network partitioning or other failures, the actual number of currently running instances might be more. This has to be a positive number if assigned. Default value is size."
+ "size", ":ref:`ref_int64`", "", "Defines the number of instances to launch at most. This number should match the size of the input if the job requires processing of all input data. This has to be a positive number. In the case this is not defined, the back-end will determine the size at run-time by reading the inputs."
+ "min_successes", ":ref:`ref_int64`", "", "An absolute number of the minimum number of successful completions of subtasks. As soon as this criteria is met, the array job will be marked as successful and outputs will be computed. This has to be a non-negative number if assigned. Default value is size (if specified)."
+ "min_success_ratio", ":ref:`ref_float`", "", "If the array job size is not known beforehand, the min_success_ratio can instead be used to determine when an array job can be marked successful."
+
+
+
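+A brief construction sketch showing how these fields relate (the Python module path and the
+chosen values are assumptions)::
+
+    from flyteidl.plugins.array_job_pb2 import ArrayJob
+
+    # Launch up to 100 subtasks, at most 10 running concurrently, and mark the
+    # array job successful once 95% of subtasks succeed.
+    job = ArrayJob(size=100, parallelism=10, min_success_ratio=0.95)
+
+    # min_successes and min_success_ratio are alternative success criteria:
+    # set whichever one fits the use case.
+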
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/plugins/mpi.proto:
+
+flyteidl/plugins/mpi.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.plugins.DistributedMPITrainingTask:
+
+DistributedMPITrainingTask
+------------------------------------------------------------------
+
+MPI operator proposal https://github.com/kubeflow/community/blob/master/proposals/mpi-operator-proposal.md
+Custom proto for plugin that enables distributed training using https://github.com/kubeflow/mpi-operator
+
+
+
+.. csv-table:: DistributedMPITrainingTask type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "num_workers", ":ref:`ref_int32`", "", "number of worker spawned in the cluster for this job"
+ "num_launcher_replicas", ":ref:`ref_int32`", "", "number of launcher replicas spawned in the cluster for this job The launcher pod invokes mpirun and communicates with worker pods through MPI."
+ "slots", ":ref:`ref_int32`", "", "number of slots per worker used in hostfile. The available slots (GPUs) in each pod."
+
+
+
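+A minimal construction sketch (the Python module path and values are assumptions)::
+
+    from flyteidl.plugins.mpi_pb2 import DistributedMPITrainingTask
+
+    # Two MPI workers driven by one launcher; each worker advertises 4 slots
+    # (GPUs) in the generated hostfile.
+    task = DistributedMPITrainingTask(num_workers=2, num_launcher_replicas=1, slots=4)
+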
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/plugins/presto.proto:
+
+flyteidl/plugins/presto.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.plugins.PrestoQuery:
+
+PrestoQuery
+------------------------------------------------------------------
+
+This message works with the 'presto' task type in the SDK and is the object that will be in the 'custom' field
+of a Presto task's TaskTemplate.
+
+
+
+.. csv-table:: PrestoQuery type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "routing_group", ":ref:`ref_string`", "", ""
+ "catalog", ":ref:`ref_string`", "", ""
+ "schema", ":ref:`ref_string`", "", ""
+ "statement", ":ref:`ref_string`", "", ""
+
+
+
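+A rough sketch of how such a query might be placed into a task's 'custom' field, which in
+flyteidl's TaskTemplate is a ``google.protobuf.Struct``. The Python module path, the query
+values, and the JSON serialization route are assumptions, not the SDK's actual implementation::
+
+    from google.protobuf import json_format
+    from google.protobuf.struct_pb2 import Struct
+    from flyteidl.plugins.presto_pb2 import PrestoQuery
+
+    query = PrestoQuery(
+        routing_group="adhoc",
+        catalog="hive",
+        schema="default",
+        statement="SELECT 1",
+    )
+    # One way to carry a plugin message in a Struct: serialize to JSON, parse back into a Struct.
+    custom = json_format.Parse(json_format.MessageToJson(query), Struct())
+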
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/plugins/pytorch.proto:
+
+flyteidl/plugins/pytorch.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.plugins.DistributedPyTorchTrainingTask:
+
+DistributedPyTorchTrainingTask
+------------------------------------------------------------------
+
+Custom proto for plugin that enables distributed training using https://github.com/kubeflow/pytorch-operator
+
+
+
+.. csv-table:: DistributedPyTorchTrainingTask type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "workers", ":ref:`ref_int32`", "", "number of worker replicas spawned in the cluster for this job"
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/plugins/qubole.proto:
+
+flyteidl/plugins/qubole.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.plugins.HiveQuery:
+
+HiveQuery
+------------------------------------------------------------------
+
+Defines a query to execute on a hive cluster.
+
+
+
+.. csv-table:: HiveQuery type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "query", ":ref:`ref_string`", "", ""
+ "timeout_sec", ":ref:`ref_uint32`", "", ""
+ "retryCount", ":ref:`ref_uint32`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.plugins.HiveQueryCollection:
+
+HiveQueryCollection
+------------------------------------------------------------------
+
+Defines a collection of hive queries.
+
+
+
+.. csv-table:: HiveQueryCollection type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "queries", ":ref:`ref_flyteidl.plugins.HiveQuery`", "repeated", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.plugins.QuboleHiveJob:
+
+QuboleHiveJob
+------------------------------------------------------------------
+
+This message works with the 'hive' task type in the SDK and is the object that will be in the 'custom' field
+of a Hive task's TaskTemplate.
+
+
+
+.. csv-table:: QuboleHiveJob type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "cluster_label", ":ref:`ref_string`", "", ""
+ "query_collection", ":ref:`ref_flyteidl.plugins.HiveQueryCollection`", "", "**Deprecated.** "
+ "tags", ":ref:`ref_string`", "repeated", ""
+ "query", ":ref:`ref_flyteidl.plugins.HiveQuery`", "", ""
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/plugins/sidecar.proto:
+
+flyteidl/plugins/sidecar.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.plugins.SidecarJob:
+
+SidecarJob
+------------------------------------------------------------------
+
+A sidecar job brings up the desired pod_spec.
+The plugin executor is responsible for keeping the pod alive until the primary container terminates
+or the task itself times out.
+
+
+
+.. csv-table:: SidecarJob type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "pod_spec", ":ref:`ref_k8s.io.api.core.v1.PodSpec`", "", ""
+ "primary_container_name", ":ref:`ref_string`", "", ""
+ "annotations", ":ref:`ref_flyteidl.plugins.SidecarJob.AnnotationsEntry`", "repeated", "Pod annotations"
+ "labels", ":ref:`ref_flyteidl.plugins.SidecarJob.LabelsEntry`", "repeated", "Pod labels"
+
+
+
+
+
+
+
+.. _ref_flyteidl.plugins.SidecarJob.AnnotationsEntry:
+
+SidecarJob.AnnotationsEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: SidecarJob.AnnotationsEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.plugins.SidecarJob.LabelsEntry:
+
+SidecarJob.LabelsEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: SidecarJob.LabelsEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/plugins/spark.proto:
+
+flyteidl/plugins/spark.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.plugins.SparkApplication:
+
+SparkApplication
+------------------------------------------------------------------
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.plugins.SparkJob:
+
+SparkJob
+------------------------------------------------------------------
+
+Custom Proto for Spark Plugin.
+
+
+
+.. csv-table:: SparkJob type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "applicationType", ":ref:`ref_flyteidl.plugins.SparkApplication.Type`", "", ""
+ "mainApplicationFile", ":ref:`ref_string`", "", ""
+ "mainClass", ":ref:`ref_string`", "", ""
+ "sparkConf", ":ref:`ref_flyteidl.plugins.SparkJob.SparkConfEntry`", "repeated", ""
+ "hadoopConf", ":ref:`ref_flyteidl.plugins.SparkJob.HadoopConfEntry`", "repeated", ""
+ "executorPath", ":ref:`ref_string`", "", "Executor path for Python jobs."
+
+
+
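+A minimal construction sketch (the Python module path, configuration keys, and file paths are
+assumptions)::
+
+    from flyteidl.plugins.spark_pb2 import SparkJob, SparkApplication
+
+    job = SparkJob(
+        applicationType=SparkApplication.PYTHON,
+        mainApplicationFile="local:///opt/app/job.py",
+        executorPath="/usr/bin/python3",
+    )
+    # sparkConf and hadoopConf behave as string-to-string maps.
+    job.sparkConf["spark.executor.instances"] = "2"
+    job.hadoopConf["fs.s3a.endpoint"] = "http://minio:9000"
+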
+
+
+
+
+.. _ref_flyteidl.plugins.SparkJob.HadoopConfEntry:
+
+SparkJob.HadoopConfEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: SparkJob.HadoopConfEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+.. _ref_flyteidl.plugins.SparkJob.SparkConfEntry:
+
+SparkJob.SparkConfEntry
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: SparkJob.SparkConfEntry type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "key", ":ref:`ref_string`", "", ""
+ "value", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.plugins.SparkApplication.Type:
+
+SparkApplication.Type
+------------------------------------------------------------------
+
+
+
+.. csv-table:: Enum SparkApplication.Type values
+ :header: "Name", "Number", "Description"
+ :widths: auto
+
+ "PYTHON", "0", ""
+ "JAVA", "1", ""
+ "SCALA", "2", ""
+ "R", "3", ""
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/plugins/tensorflow.proto:
+
+flyteidl/plugins/tensorflow.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.plugins.DistributedTensorflowTrainingTask:
+
+DistributedTensorflowTrainingTask
+------------------------------------------------------------------
+
+Custom proto for plugin that enables distributed training using https://github.com/kubeflow/tf-operator
+
+
+
+.. csv-table:: DistributedTensorflowTrainingTask type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "workers", ":ref:`ref_int32`", "", "number of worker, ps, chief replicas spawned in the cluster for this job"
+ "ps_replicas", ":ref:`ref_int32`", "", "PS -> Parameter server"
+ "chief_replicas", ":ref:`ref_int32`", "", ""
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl/plugins/waitable.proto:
+
+flyteidl/plugins/waitable.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.plugins.Waitable:
+
+Waitable
+------------------------------------------------------------------
+
+Represents an Execution that was launched and could be waited on.
+
+
+
+.. csv-table:: Waitable type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "wf_exec_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", ""
+ "phase", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "", ""
+ "workflow_id", ":ref:`ref_string`", "", ""
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/flyteidl/protos/docs/service/service.rst b/flyteidl/protos/docs/service/service.rst
new file mode 100644
index 00000000000..48d521c00cf
--- /dev/null
+++ b/flyteidl/protos/docs/service/service.rst
@@ -0,0 +1,354 @@
+######################
+Protocol Documentation
+######################
+
+
+
+
+.. _ref_flyteidl/service/admin.proto:
+
+flyteidl/service/admin.proto
+==================================================================
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.service.AdminService:
+
+AdminService
+------------------------------------------------------------------
+
+The following defines an RPC service that is also served over HTTP via grpc-gateway.
+Standard response codes for both are defined here: https://github.com/grpc-ecosystem/grpc-gateway/blob/master/runtime/errors.go
+
+.. csv-table:: AdminService service methods
+ :header: "Method Name", "Request Type", "Response Type", "Description"
+ :widths: auto
+
+ "CreateTask", ":ref:`ref_flyteidl.admin.TaskCreateRequest`", ":ref:`ref_flyteidl.admin.TaskCreateResponse`", "Create and upload a :ref:`ref_flyteidl.admin.Task` definition"
+ "GetTask", ":ref:`ref_flyteidl.admin.ObjectGetRequest`", ":ref:`ref_flyteidl.admin.Task`", "Fetch a :ref:`ref_flyteidl.admin.Task` definition."
+ "ListTaskIds", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierListRequest`", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierList`", "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects."
+ "ListTasks", ":ref:`ref_flyteidl.admin.ResourceListRequest`", ":ref:`ref_flyteidl.admin.TaskList`", "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions."
+ "CreateWorkflow", ":ref:`ref_flyteidl.admin.WorkflowCreateRequest`", ":ref:`ref_flyteidl.admin.WorkflowCreateResponse`", "Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition"
+ "GetWorkflow", ":ref:`ref_flyteidl.admin.ObjectGetRequest`", ":ref:`ref_flyteidl.admin.Workflow`", "Fetch a :ref:`ref_flyteidl.admin.Workflow` definition."
+ "ListWorkflowIds", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierListRequest`", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierList`", "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects."
+ "ListWorkflows", ":ref:`ref_flyteidl.admin.ResourceListRequest`", ":ref:`ref_flyteidl.admin.WorkflowList`", "Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions."
+ "CreateLaunchPlan", ":ref:`ref_flyteidl.admin.LaunchPlanCreateRequest`", ":ref:`ref_flyteidl.admin.LaunchPlanCreateResponse`", "Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition"
+ "GetLaunchPlan", ":ref:`ref_flyteidl.admin.ObjectGetRequest`", ":ref:`ref_flyteidl.admin.LaunchPlan`", "Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition."
+ "GetActiveLaunchPlan", ":ref:`ref_flyteidl.admin.ActiveLaunchPlanRequest`", ":ref:`ref_flyteidl.admin.LaunchPlan`", "Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`."
+ "ListActiveLaunchPlans", ":ref:`ref_flyteidl.admin.ActiveLaunchPlanListRequest`", ":ref:`ref_flyteidl.admin.LaunchPlanList`", "List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`."
+ "ListLaunchPlanIds", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierListRequest`", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierList`", "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects."
+ "ListLaunchPlans", ":ref:`ref_flyteidl.admin.ResourceListRequest`", ":ref:`ref_flyteidl.admin.LaunchPlanList`", "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions."
+ "UpdateLaunchPlan", ":ref:`ref_flyteidl.admin.LaunchPlanUpdateRequest`", ":ref:`ref_flyteidl.admin.LaunchPlanUpdateResponse`", "Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`."
+ "CreateExecution", ":ref:`ref_flyteidl.admin.ExecutionCreateRequest`", ":ref:`ref_flyteidl.admin.ExecutionCreateResponse`", "Triggers the creation of a :ref:`ref_flyteidl.admin.Execution`"
+ "RelaunchExecution", ":ref:`ref_flyteidl.admin.ExecutionRelaunchRequest`", ":ref:`ref_flyteidl.admin.ExecutionCreateResponse`", "Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution`"
+ "RecoverExecution", ":ref:`ref_flyteidl.admin.ExecutionRecoverRequest`", ":ref:`ref_flyteidl.admin.ExecutionCreateResponse`", "Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details."
+ "GetExecution", ":ref:`ref_flyteidl.admin.WorkflowExecutionGetRequest`", ":ref:`ref_flyteidl.admin.Execution`", "Fetches a :ref:`ref_flyteidl.admin.Execution`."
+ "UpdateExecution", ":ref:`ref_flyteidl.admin.ExecutionUpdateRequest`", ":ref:`ref_flyteidl.admin.ExecutionUpdateResponse`", "Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`."
+ "GetExecutionData", ":ref:`ref_flyteidl.admin.WorkflowExecutionGetDataRequest`", ":ref:`ref_flyteidl.admin.WorkflowExecutionGetDataResponse`", "Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`."
+ "ListExecutions", ":ref:`ref_flyteidl.admin.ResourceListRequest`", ":ref:`ref_flyteidl.admin.ExecutionList`", "Fetch a list of :ref:`ref_flyteidl.admin.Execution`."
+ "TerminateExecution", ":ref:`ref_flyteidl.admin.ExecutionTerminateRequest`", ":ref:`ref_flyteidl.admin.ExecutionTerminateResponse`", "Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`."
+ "GetNodeExecution", ":ref:`ref_flyteidl.admin.NodeExecutionGetRequest`", ":ref:`ref_flyteidl.admin.NodeExecution`", "Fetches a :ref:`ref_flyteidl.admin.NodeExecution`."
+ "ListNodeExecutions", ":ref:`ref_flyteidl.admin.NodeExecutionListRequest`", ":ref:`ref_flyteidl.admin.NodeExecutionList`", "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`."
+ "ListNodeExecutionsForTask", ":ref:`ref_flyteidl.admin.NodeExecutionForTaskListRequest`", ":ref:`ref_flyteidl.admin.NodeExecutionList`", "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`."
+ "GetNodeExecutionData", ":ref:`ref_flyteidl.admin.NodeExecutionGetDataRequest`", ":ref:`ref_flyteidl.admin.NodeExecutionGetDataResponse`", "Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`."
+ "RegisterProject", ":ref:`ref_flyteidl.admin.ProjectRegisterRequest`", ":ref:`ref_flyteidl.admin.ProjectRegisterResponse`", "Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment."
+ "UpdateProject", ":ref:`ref_flyteidl.admin.Project`", ":ref:`ref_flyteidl.admin.ProjectUpdateResponse`", "Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API."
+ "ListProjects", ":ref:`ref_flyteidl.admin.ProjectListRequest`", ":ref:`ref_flyteidl.admin.Projects`", "Fetches a list of :ref:`ref_flyteidl.admin.Project`"
+ "CreateWorkflowEvent", ":ref:`ref_flyteidl.admin.WorkflowExecutionEventRequest`", ":ref:`ref_flyteidl.admin.WorkflowExecutionEventResponse`", "Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred."
+ "CreateNodeEvent", ":ref:`ref_flyteidl.admin.NodeExecutionEventRequest`", ":ref:`ref_flyteidl.admin.NodeExecutionEventResponse`", "Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred."
+ "CreateTaskEvent", ":ref:`ref_flyteidl.admin.TaskExecutionEventRequest`", ":ref:`ref_flyteidl.admin.TaskExecutionEventResponse`", "Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred."
+ "GetTaskExecution", ":ref:`ref_flyteidl.admin.TaskExecutionGetRequest`", ":ref:`ref_flyteidl.admin.TaskExecution`", "Fetches a :ref:`ref_flyteidl.admin.TaskExecution`."
+ "ListTaskExecutions", ":ref:`ref_flyteidl.admin.TaskExecutionListRequest`", ":ref:`ref_flyteidl.admin.TaskExecutionList`", "Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`."
+ "GetTaskExecutionData", ":ref:`ref_flyteidl.admin.TaskExecutionGetDataRequest`", ":ref:`ref_flyteidl.admin.TaskExecutionGetDataResponse`", "Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`."
+ "UpdateProjectDomainAttributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesUpdateRequest`", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesUpdateResponse`", "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain."
+ "GetProjectDomainAttributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesGetRequest`", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesGetResponse`", "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain."
+ "DeleteProjectDomainAttributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesDeleteRequest`", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesDeleteResponse`", "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain."
+ "UpdateWorkflowAttributes", ":ref:`ref_flyteidl.admin.WorkflowAttributesUpdateRequest`", ":ref:`ref_flyteidl.admin.WorkflowAttributesUpdateResponse`", "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow."
+ "GetWorkflowAttributes", ":ref:`ref_flyteidl.admin.WorkflowAttributesGetRequest`", ":ref:`ref_flyteidl.admin.WorkflowAttributesGetResponse`", "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow."
+ "DeleteWorkflowAttributes", ":ref:`ref_flyteidl.admin.WorkflowAttributesDeleteRequest`", ":ref:`ref_flyteidl.admin.WorkflowAttributesDeleteResponse`", "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow."
+ "ListMatchableAttributes", ":ref:`ref_flyteidl.admin.ListMatchableAttributesRequest`", ":ref:`ref_flyteidl.admin.ListMatchableAttributesResponse`", "Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type."
+ "ListNamedEntities", ":ref:`ref_flyteidl.admin.NamedEntityListRequest`", ":ref:`ref_flyteidl.admin.NamedEntityList`", "Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects."
+ "GetNamedEntity", ":ref:`ref_flyteidl.admin.NamedEntityGetRequest`", ":ref:`ref_flyteidl.admin.NamedEntity`", "Returns a :ref:`ref_flyteidl.admin.NamedEntity` object."
+ "UpdateNamedEntity", ":ref:`ref_flyteidl.admin.NamedEntityUpdateRequest`", ":ref:`ref_flyteidl.admin.NamedEntityUpdateResponse`", "Updates a :ref:`ref_flyteidl.admin.NamedEntity` object."
+ "GetVersion", ":ref:`ref_flyteidl.admin.GetVersionRequest`", ":ref:`ref_flyteidl.admin.GetVersionResponse`", ""
+
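+Since AdminService is a plain gRPC service (also exposed over HTTP through grpc-gateway), it can be called with generated client stubs. The snippet below is a minimal sketch, assuming the generated Python stubs follow the standard protoc layout (``flyteidl.service.admin_pb2_grpc``, ``flyteidl.admin.common_pb2``) and that an Admin endpoint is reachable at ``localhost:8089`` without TLS; the project and domain names are illustrative.
+
+.. code-block:: python
+
+   import grpc
+
+   from flyteidl.service import admin_pb2_grpc
+   from flyteidl.admin import common_pb2
+
+   # Hypothetical endpoint; real deployments typically require TLS and auth interceptors.
+   channel = grpc.insecure_channel("localhost:8089")
+   admin = admin_pb2_grpc.AdminServiceStub(channel)
+
+   # ListTasks: fetch up to 5 registered task definitions for a project/domain.
+   tasks = admin.ListTasks(
+       common_pb2.ResourceListRequest(
+           id=common_pb2.NamedEntityIdentifier(project="flytesnacks", domain="development"),
+           limit=5,
+       )
+   )
+   for task in tasks.tasks:
+       print(task.id.name, task.id.version)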
+
+
+
+
+.. _ref_flyteidl/service/auth.proto:
+
+flyteidl/service/auth.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.service.OAuth2MetadataRequest:
+
+OAuth2MetadataRequest
+------------------------------------------------------------------
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.service.OAuth2MetadataResponse:
+
+OAuth2MetadataResponse
+------------------------------------------------------------------
+
+OAuth2MetadataResponse defines an RFC-compliant response for the /.well-known/oauth-authorization-server metadata
+document, as defined in https://tools.ietf.org/html/rfc8414.
+
+
+
+.. csv-table:: OAuth2MetadataResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "issuer", ":ref:`ref_string`", "", "Defines the issuer string in all JWT tokens this server issues. The issuer can be admin itself or an external issuer."
+ "authorization_endpoint", ":ref:`ref_string`", "", "URL of the authorization server's authorization endpoint [RFC6749]. This is REQUIRED unless no grant types are supported that use the authorization endpoint."
+ "token_endpoint", ":ref:`ref_string`", "", "URL of the authorization server's token endpoint [RFC6749]."
+ "response_types_supported", ":ref:`ref_string`", "repeated", "Array containing a list of the OAuth 2.0 response_type values that this authorization server supports."
+ "scopes_supported", ":ref:`ref_string`", "repeated", "JSON array containing a list of the OAuth 2.0 [RFC6749] scope values that this authorization server supports."
+ "token_endpoint_auth_methods_supported", ":ref:`ref_string`", "repeated", "JSON array containing a list of client authentication methods supported by this token endpoint."
+ "jwks_uri", ":ref:`ref_string`", "", "URL of the authorization server's JWK Set [JWK] document. The referenced document contains the signing key(s) the client uses to validate signatures from the authorization server."
+ "code_challenge_methods_supported", ":ref:`ref_string`", "repeated", "JSON array containing a list of Proof Key for Code Exchange (PKCE) [RFC7636] code challenge methods supported by this authorization server."
+ "grant_types_supported", ":ref:`ref_string`", "repeated", "JSON array containing a list of the OAuth 2.0 grant type values that this authorization server supports."
+
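+To illustrate how a client might consume these fields, the sketch below builds an OAuth2 authorization request with a PKCE (RFC 7636) S256 challenge from an OAuth2MetadataResponse-like object. The helper is not part of flyteidl; the function name and arguments are assumptions for illustration.
+
+.. code-block:: python
+
+   import base64
+   import hashlib
+   import os
+   from urllib.parse import urlencode
+
+   def build_authorization_url(metadata, client_id, redirect_uri, scopes):
+       """Builds an authorization URL from OAuth2MetadataResponse-style fields."""
+       # PKCE: high-entropy verifier, then BASE64URL(SHA256(verifier)) without padding.
+       verifier = base64.urlsafe_b64encode(os.urandom(32)).rstrip(b"=").decode()
+       challenge = base64.urlsafe_b64encode(
+           hashlib.sha256(verifier.encode()).digest()
+       ).rstrip(b"=").decode()
+
+       params = {
+           "response_type": "code",
+           "client_id": client_id,
+           "redirect_uri": redirect_uri,
+           "scope": " ".join(scopes),
+       }
+       if "S256" in metadata.code_challenge_methods_supported:
+           params["code_challenge"] = challenge
+           params["code_challenge_method"] = "S256"
+       return f"{metadata.authorization_endpoint}?{urlencode(params)}", verifier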
+
+
+
+
+
+
+.. _ref_flyteidl.service.PublicClientAuthConfigRequest:
+
+PublicClientAuthConfigRequest
+------------------------------------------------------------------
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.service.PublicClientAuthConfigResponse:
+
+PublicClientAuthConfigResponse
+------------------------------------------------------------------
+
+PublicClientAuthConfigResponse encapsulates public information that Flyte clients (CLIs, etc.) can use to authenticate users.
+
+
+
+.. csv-table:: PublicClientAuthConfigResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "client_id", ":ref:`ref_string`", "", "client_id to use when initiating OAuth2 authorization requests."
+ "redirect_uri", ":ref:`ref_string`", "", "redirect uri to use when initiating OAuth2 authorization requests."
+ "scopes", ":ref:`ref_string`", "repeated", "scopes to request when initiating OAuth2 authorization requests."
+ "authorization_metadata_key", ":ref:`ref_string`", "", "Authorization Header to use when passing Access Tokens to the server. If not provided, the client should use the default http `Authorization` header."
+
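+As a sketch of how ``authorization_metadata_key`` is meant to be used, the helper below attaches a previously-obtained access token to every outgoing gRPC call under that header name. It uses only standard ``grpc`` credential plumbing and is not shipped with flyteidl; the token is assumed to come from an earlier OAuth2 flow.
+
+.. code-block:: python
+
+   import grpc
+
+   def authenticated_channel(target, public_client_config, access_token):
+       """Returns a channel whose calls carry the token in the configured header."""
+       # Fall back to the standard `authorization` header when the key is unset.
+       header = (public_client_config.authorization_metadata_key or "authorization").lower()
+       call_creds = grpc.metadata_call_credentials(
+           lambda context, callback: callback(((header, f"Bearer {access_token}"),), None)
+       )
+       channel_creds = grpc.composite_channel_credentials(
+           grpc.ssl_channel_credentials(), call_creds
+       )
+       return grpc.secure_channel(target, channel_creds)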
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.service.AuthMetadataService:
+
+AuthMetadataService
+------------------------------------------------------------------
+
+The following defines an RPC service that is also served over HTTP via grpc-gateway.
+Standard response codes for both are defined here: https://github.com/grpc-ecosystem/grpc-gateway/blob/master/runtime/errors.go
+RPCs defined in this service must be anonymously accessible.
+
+.. csv-table:: AuthMetadataService service methods
+ :header: "Method Name", "Request Type", "Response Type", "Description"
+ :widths: auto
+
+ "GetOAuth2Metadata", ":ref:`ref_flyteidl.service.OAuth2MetadataRequest`", ":ref:`ref_flyteidl.service.OAuth2MetadataResponse`", "Anonymously accessible. Retrieves local or external oauth authorization server metadata."
+ "GetPublicClientConfig", ":ref:`ref_flyteidl.service.PublicClientAuthConfigRequest`", ":ref:`ref_flyteidl.service.PublicClientAuthConfigResponse`", "Anonymously accessible. Retrieves the client information clients should use when initiating OAuth2 authorization requests."
+
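+The sketch below exercises both RPCs over an anonymous channel, assuming the generated Python stubs are importable as ``flyteidl.service.auth_pb2`` / ``auth_pb2_grpc`` and that Admin is reachable at ``localhost:8089`` without TLS.
+
+.. code-block:: python
+
+   import grpc
+
+   from flyteidl.service import auth_pb2, auth_pb2_grpc
+
+   channel = grpc.insecure_channel("localhost:8089")  # hypothetical endpoint
+   auth = auth_pb2_grpc.AuthMetadataServiceStub(channel)
+
+   # Both RPCs must be anonymously accessible, so no credentials are attached.
+   oauth2_metadata = auth.GetOAuth2Metadata(auth_pb2.OAuth2MetadataRequest())
+   client_config = auth.GetPublicClientConfig(auth_pb2.PublicClientAuthConfigRequest())
+
+   print(oauth2_metadata.authorization_endpoint, oauth2_metadata.token_endpoint)
+   print(client_config.client_id, list(client_config.scopes))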
+
+
+
+
+.. _ref_flyteidl/service/dataproxy.proto:
+
+flyteidl/service/dataproxy.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.service.CreateUploadLocationRequest:
+
+CreateUploadLocationRequest
+------------------------------------------------------------------
+
+CreateUploadLocationRequest specifies the request for the CreateUploadLocation API.
+
+
+
+.. csv-table:: CreateUploadLocationRequest type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "project", ":ref:`ref_string`", "", "Project to create the upload location for +required"
+ "domain", ":ref:`ref_string`", "", "Domain to create the upload location for. +required"
+ "filename", ":ref:`ref_string`", "", "Filename specifies a desired suffix for the generated location. E.g. `file.py` or `pre/fix/file.zip`. +optional. By default, the service will generate a consistent name based on the provided parameters."
+ "expires_in", ":ref:`ref_google.protobuf.Duration`", "", "ExpiresIn defines a requested expiration duration for the generated url. The request will be rejected if this exceeds the platform allowed max. +optional. The default value comes from a global config."
+ "content_md5", ":ref:`ref_bytes`", "", "ContentMD5 restricts the upload location to the specific MD5 provided. The ContentMD5 will also appear in the generated path. +required"
+
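+Because ``content_md5`` carries the raw digest bytes (not a hex string) and ``expires_in`` is a protobuf Duration, constructing the request is worth a short sketch. Module paths are assumed to follow the standard protoc layout; the file name, project, and domain are illustrative.
+
+.. code-block:: python
+
+   import hashlib
+
+   from google.protobuf import duration_pb2
+   from flyteidl.service import dataproxy_pb2
+
+   data = open("my_archive.tgz", "rb").read()  # illustrative local file
+
+   request = dataproxy_pb2.CreateUploadLocationRequest(
+       project="flytesnacks",    # hypothetical project
+       domain="development",     # hypothetical domain
+       filename="my_archive.tgz",
+       content_md5=hashlib.md5(data).digest(),  # raw bytes, not hexdigest()
+       expires_in=duration_pb2.Duration(seconds=3600),
+   )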
+
+
+
+
+
+
+.. _ref_flyteidl.service.CreateUploadLocationResponse:
+
+CreateUploadLocationResponse
+------------------------------------------------------------------
+
+
+
+
+
+.. csv-table:: CreateUploadLocationResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "signed_url", ":ref:`ref_string`", "", "SignedUrl specifies the url to use to upload content to (e.g. https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...)"
+ "native_url", ":ref:`ref_string`", "", "NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar)"
+ "expires_at", ":ref:`ref_google.protobuf.Timestamp`", "", "ExpiresAt defines when will the signed URL expires."
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.service.DataProxyService:
+
+DataProxyService
+------------------------------------------------------------------
+
+DataProxyService defines an RPC Service that allows access to user-data in a controlled manner.
+
+.. csv-table:: DataProxyService service methods
+ :header: "Method Name", "Request Type", "Response Type", "Description"
+ :widths: auto
+
+ "CreateUploadLocation", ":ref:`ref_flyteidl.service.CreateUploadLocationRequest`", ":ref:`ref_flyteidl.service.CreateUploadLocationResponse`", "CreateUploadLocation creates a signed url to upload artifacts to for a given project/domain."
+
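+Putting the pieces together, the sketch below calls CreateUploadLocation and then uploads the bytes to the returned signed URL. It reuses ``request`` and ``data`` from the CreateUploadLocationRequest sketch above, assumes the third-party ``requests`` HTTP client and an insecure local channel, and assumes an S3-style signed URL that expects the base64-encoded MD5 in a ``Content-MD5`` header.
+
+.. code-block:: python
+
+   import base64
+
+   import grpc
+   import requests
+
+   from flyteidl.service import dataproxy_pb2_grpc
+
+   channel = grpc.insecure_channel("localhost:8089")  # hypothetical endpoint
+   dataproxy = dataproxy_pb2_grpc.DataProxyServiceStub(channel)
+
+   location = dataproxy.CreateUploadLocation(request)
+
+   # Upload the exact bytes the MD5 was computed over; the signed URL is
+   # typically bound to that checksum.
+   resp = requests.put(
+       location.signed_url,
+       data=data,
+       headers={"Content-MD5": base64.b64encode(request.content_md5).decode()},
+   )
+   resp.raise_for_status()
+   print("stored at", location.native_url, "expires at", location.expires_at.ToDatetime())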
+
+
+
+
+.. _ref_flyteidl/service/identity.proto:
+
+flyteidl/service/identity.proto
+==================================================================
+
+
+
+
+
+.. _ref_flyteidl.service.UserInfoRequest:
+
+UserInfoRequest
+------------------------------------------------------------------
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.service.UserInfoResponse:
+
+UserInfoResponse
+------------------------------------------------------------------
+
+See the OpenID Connect spec at https://openid.net/specs/openid-connect-core-1_0.html#UserInfoResponse for more information.
+
+
+
+.. csv-table:: UserInfoResponse type fields
+ :header: "Field", "Type", "Label", "Description"
+ :widths: auto
+
+ "subject", ":ref:`ref_string`", "", "Locally unique and never reassigned identifier within the Issuer for the End-User, which is intended to be consumed by the Client."
+ "name", ":ref:`ref_string`", "", "Full name"
+ "preferred_username", ":ref:`ref_string`", "", "Shorthand name by which the End-User wishes to be referred to"
+ "given_name", ":ref:`ref_string`", "", "Given name(s) or first name(s)"
+ "family_name", ":ref:`ref_string`", "", "Surname(s) or last name(s)"
+ "email", ":ref:`ref_string`", "", "Preferred e-mail address"
+ "picture", ":ref:`ref_string`", "", "Profile picture URL"
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. _ref_flyteidl.service.IdentityService:
+
+IdentityService
+------------------------------------------------------------------
+
+IdentityService defines an RPC Service that interacts with user/app identities.
+
+.. csv-table:: IdentityService service methods
+ :header: "Method Name", "Request Type", "Response Type", "Description"
+ :widths: auto
+
+ "UserInfo", ":ref:`ref_flyteidl.service.UserInfoRequest`", ":ref:`ref_flyteidl.service.UserInfoResponse`", "Retrieves user information about the currently logged in user."
+
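+A minimal sketch of calling UserInfo, assuming the generated stubs are importable as ``flyteidl.service.identity_pb2`` / ``identity_pb2_grpc`` and that ``channel`` carries valid user credentials (for example, one produced by the ``authenticated_channel`` sketch earlier in this document).
+
+.. code-block:: python
+
+   from flyteidl.service import identity_pb2, identity_pb2_grpc
+
+   # `channel` must be authenticated as the end user for this RPC to succeed.
+   identity = identity_pb2_grpc.IdentityServiceStub(channel)
+
+   user = identity.UserInfo(identity_pb2.UserInfoRequest())
+   print(user.subject, user.preferred_username, user.email)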
+
+