From b3a44a51c197621a39109f61da09cbad40b01136 Mon Sep 17 00:00:00 2001 From: Niels Bantilan Date: Wed, 2 Oct 2024 16:37:39 -0400 Subject: [PATCH 1/5] fix flyteidl structure so it renders under /api/ Signed-off-by: Niels Bantilan --- docs/api/flyteidl/buf.lock | 18 + docs/api/flyteidl/buf.yaml | 12 + docs/api/flyteidl/docs/admin/admin.rst | 4623 +++++++++++++++++ docs/api/flyteidl/docs/admin/index.rst | 13 + docs/api/flyteidl/docs/contributing.md | 79 + docs/api/flyteidl/docs/core/core.rst | 3952 ++++++++++++++ docs/api/flyteidl/docs/core/index.rst | 15 + .../flyteidl/docs/datacatalog/datacatalog.rst | 1313 +++++ docs/api/flyteidl/docs/datacatalog/index.rst | 16 + docs/api/flyteidl/docs/event/event.rst | 726 +++ docs/api/flyteidl/docs/event/index.rst | 27 + docs/api/flyteidl/docs/plugins/index.rst | 14 + docs/api/flyteidl/docs/plugins/plugins.rst | 780 +++ docs/api/flyteidl/docs/restructuredtext.tmpl | 129 + docs/api/flyteidl/docs/service/index.rst | 13 + docs/api/flyteidl/docs/service/service.rst | 543 ++ .../docs/withoutscalar_restructuredtext.tmpl | 105 + .../api/flyteidl/docs_index.rst | 1 + docs/api/flyteidl/flyteidl/admin/agent.proto | 258 + .../flyteidl/admin/cluster_assignment.proto | 11 + docs/api/flyteidl/flyteidl/admin/common.proto | 327 ++ .../flyteidl/admin/description_entity.proto | 95 + docs/api/flyteidl/flyteidl/admin/event.proto | 70 + .../flyteidl/flyteidl/admin/execution.proto | 428 ++ .../flyteidl/flyteidl/admin/launch_plan.proto | 226 + .../flyteidl/admin/matchable_resource.proto | 194 + .../flyteidl/admin/node_execution.proto | 245 + .../flyteidl/admin/notification.proto | 27 + .../api/flyteidl/flyteidl/admin/project.proto | 132 + .../flyteidl/admin/project_attributes.proto | 69 + .../admin/project_domain_attributes.proto | 80 + .../flyteidl/flyteidl/admin/schedule.proto | 43 + docs/api/flyteidl/flyteidl/admin/signal.proto | 86 + docs/api/flyteidl/flyteidl/admin/task.proto | 71 + .../flyteidl/admin/task_execution.proto | 168 + 
.../api/flyteidl/flyteidl/admin/version.proto | 27 + .../flyteidl/flyteidl/admin/workflow.proto | 92 + .../flyteidl/admin/workflow_attributes.proto | 89 + .../flyteidl/cacheservice/cacheservice.proto | 143 + .../flyteidl/flyteidl/core/artifact_id.proto | 112 + docs/api/flyteidl/flyteidl/core/catalog.proto | 63 + .../api/flyteidl/flyteidl/core/compiler.proto | 64 + .../flyteidl/flyteidl/core/condition.proto | 63 + .../flyteidl/flyteidl/core/dynamic_job.proto | 32 + docs/api/flyteidl/flyteidl/core/errors.proto | 35 + .../flyteidl/flyteidl/core/execution.proto | 118 + .../flyteidl/core/execution_envs.proto | 45 + .../flyteidl/flyteidl/core/identifier.proto | 80 + .../flyteidl/flyteidl/core/interface.proto | 64 + .../api/flyteidl/flyteidl/core/literals.proto | 200 + docs/api/flyteidl/flyteidl/core/metrics.proto | 50 + .../api/flyteidl/flyteidl/core/security.proto | 130 + docs/api/flyteidl/flyteidl/core/tasks.proto | 351 ++ docs/api/flyteidl/flyteidl/core/types.proto | 208 + .../api/flyteidl/flyteidl/core/workflow.proto | 331 ++ .../flyteidl/core/workflow_closure.proto | 18 + .../flyteidl/datacatalog/datacatalog.proto | 420 ++ .../flyteidl/flyteidl/event/cloudevents.proto | 73 + docs/api/flyteidl/flyteidl/event/event.proto | 328 ++ .../flyteidl/flyteidl/plugins/array_job.proto | 30 + .../flyteidl/flyteidl/plugins/common.proto | 27 + docs/api/flyteidl/flyteidl/plugins/dask.proto | 41 + .../flyteidl/plugins/kubeflow/common.proto | 28 + .../flyteidl/plugins/kubeflow/mpi.proto | 47 + .../flyteidl/plugins/kubeflow/pytorch.proto | 53 + .../plugins/kubeflow/tensorflow.proto | 46 + docs/api/flyteidl/flyteidl/plugins/mpi.proto | 20 + .../flyteidl/flyteidl/plugins/presto.proto | 14 + .../flyteidl/flyteidl/plugins/pytorch.proto | 25 + .../flyteidl/flyteidl/plugins/qubole.proto | 26 + docs/api/flyteidl/flyteidl/plugins/ray.proto | 53 + .../api/flyteidl/flyteidl/plugins/spark.proto | 34 + .../flyteidl/plugins/tensorflow.proto | 18 + .../flyteidl/flyteidl/plugins/waitable.proto | 15 
+ .../api/flyteidl/flyteidl/service/admin.proto | 668 +++ .../api/flyteidl/flyteidl/service/agent.proto | 79 + docs/api/flyteidl/flyteidl/service/auth.proto | 94 + .../flyteidl/flyteidl/service/dataproxy.proto | 205 + .../service/external_plugin_service.proto | 79 + .../flyteidl/flyteidl/service/identity.proto | 51 + .../flyteidl/flyteidl/service/signal.proto | 55 + docs/api/index.md | 12 +- docs/community/contribute_docs.md | 2 +- docs/conf.py | 21 +- docs/index.md | 3 +- docs/reference_flyteidl.md | 18 - flyteidl/README.md | 69 +- flyteidl/protos/contributing.md | 68 + flyteidl/protos/docs_index.rst | 19 + 89 files changed, 19871 insertions(+), 94 deletions(-) create mode 100644 docs/api/flyteidl/buf.lock create mode 100644 docs/api/flyteidl/buf.yaml create mode 100644 docs/api/flyteidl/docs/admin/admin.rst create mode 100644 docs/api/flyteidl/docs/admin/index.rst create mode 100644 docs/api/flyteidl/docs/contributing.md create mode 100644 docs/api/flyteidl/docs/core/core.rst create mode 100644 docs/api/flyteidl/docs/core/index.rst create mode 100644 docs/api/flyteidl/docs/datacatalog/datacatalog.rst create mode 100644 docs/api/flyteidl/docs/datacatalog/index.rst create mode 100644 docs/api/flyteidl/docs/event/event.rst create mode 100644 docs/api/flyteidl/docs/event/index.rst create mode 100644 docs/api/flyteidl/docs/plugins/index.rst create mode 100644 docs/api/flyteidl/docs/plugins/plugins.rst create mode 100644 docs/api/flyteidl/docs/restructuredtext.tmpl create mode 100644 docs/api/flyteidl/docs/service/index.rst create mode 100644 docs/api/flyteidl/docs/service/service.rst create mode 100644 docs/api/flyteidl/docs/withoutscalar_restructuredtext.tmpl rename flyteidl/protos/index.rst => docs/api/flyteidl/docs_index.rst (96%) create mode 100644 docs/api/flyteidl/flyteidl/admin/agent.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/cluster_assignment.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/common.proto create mode 100644 
docs/api/flyteidl/flyteidl/admin/description_entity.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/event.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/execution.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/launch_plan.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/matchable_resource.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/node_execution.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/notification.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/project.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/project_attributes.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/project_domain_attributes.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/schedule.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/signal.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/task.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/task_execution.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/version.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/workflow.proto create mode 100644 docs/api/flyteidl/flyteidl/admin/workflow_attributes.proto create mode 100644 docs/api/flyteidl/flyteidl/cacheservice/cacheservice.proto create mode 100644 docs/api/flyteidl/flyteidl/core/artifact_id.proto create mode 100644 docs/api/flyteidl/flyteidl/core/catalog.proto create mode 100644 docs/api/flyteidl/flyteidl/core/compiler.proto create mode 100644 docs/api/flyteidl/flyteidl/core/condition.proto create mode 100644 docs/api/flyteidl/flyteidl/core/dynamic_job.proto create mode 100644 docs/api/flyteidl/flyteidl/core/errors.proto create mode 100644 docs/api/flyteidl/flyteidl/core/execution.proto create mode 100644 docs/api/flyteidl/flyteidl/core/execution_envs.proto create mode 100644 docs/api/flyteidl/flyteidl/core/identifier.proto create mode 100644 docs/api/flyteidl/flyteidl/core/interface.proto create mode 100644 
docs/api/flyteidl/flyteidl/core/literals.proto create mode 100644 docs/api/flyteidl/flyteidl/core/metrics.proto create mode 100644 docs/api/flyteidl/flyteidl/core/security.proto create mode 100644 docs/api/flyteidl/flyteidl/core/tasks.proto create mode 100644 docs/api/flyteidl/flyteidl/core/types.proto create mode 100644 docs/api/flyteidl/flyteidl/core/workflow.proto create mode 100644 docs/api/flyteidl/flyteidl/core/workflow_closure.proto create mode 100644 docs/api/flyteidl/flyteidl/datacatalog/datacatalog.proto create mode 100644 docs/api/flyteidl/flyteidl/event/cloudevents.proto create mode 100644 docs/api/flyteidl/flyteidl/event/event.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/array_job.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/common.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/dask.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/kubeflow/common.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/kubeflow/mpi.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/kubeflow/pytorch.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/kubeflow/tensorflow.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/mpi.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/presto.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/pytorch.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/qubole.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/ray.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/spark.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/tensorflow.proto create mode 100644 docs/api/flyteidl/flyteidl/plugins/waitable.proto create mode 100644 docs/api/flyteidl/flyteidl/service/admin.proto create mode 100644 docs/api/flyteidl/flyteidl/service/agent.proto create mode 100644 docs/api/flyteidl/flyteidl/service/auth.proto create mode 100644 docs/api/flyteidl/flyteidl/service/dataproxy.proto create mode 100644 
docs/api/flyteidl/flyteidl/service/external_plugin_service.proto create mode 100644 docs/api/flyteidl/flyteidl/service/identity.proto create mode 100644 docs/api/flyteidl/flyteidl/service/signal.proto delete mode 100644 docs/reference_flyteidl.md create mode 100644 flyteidl/protos/contributing.md create mode 100644 flyteidl/protos/docs_index.rst diff --git a/docs/api/flyteidl/buf.lock b/docs/api/flyteidl/buf.lock new file mode 100644 index 0000000000..0e0addc9f2 --- /dev/null +++ b/docs/api/flyteidl/buf.lock @@ -0,0 +1,18 @@ +# Generated by buf. DO NOT EDIT. +version: v1 +deps: + - remote: buf.build + owner: googleapis + repository: googleapis + commit: 62f35d8aed1149c291d606d958a7ce32 + digest: shake256:c5f5c2401cf70b7c9719834954f31000a978397fdfebda861419bb4ab90fa8efae92710fddab0820533908a1e25ed692a8e119432b7b260c895087a4975b32f3 + - remote: buf.build + owner: grpc-ecosystem + repository: grpc-gateway + commit: 3f42134f4c564983838425bc43c7a65f + digest: shake256:3d11d4c0fe5e05fda0131afefbce233940e27f0c31c5d4e385686aea58ccd30f72053f61af432fa83f1fc11cda57f5f18ca3da26a29064f73c5a0d076bba8d92 + - remote: buf.build + owner: unionai + repository: protoc-gen-swagger + commit: fd9d94dc48154d5c94ccc43695df150f + digest: shake256:57743c99f8173b432f0750eac13671fe7721a824cbf5d4fbd85ffdd0d7b45ded507f7b0a49020f9a5eb2a434e9009ad9480140b4c9173ff58bd85c4685197d5b diff --git a/docs/api/flyteidl/buf.yaml b/docs/api/flyteidl/buf.yaml new file mode 100644 index 0000000000..420796f854 --- /dev/null +++ b/docs/api/flyteidl/buf.yaml @@ -0,0 +1,12 @@ +version: v1 +name: buf.build/flyteorg/flyteidl +lint: + use: + - DEFAULT +breaking: + use: + - FILE +deps: + - buf.build/googleapis/googleapis:62f35d8aed1149c291d606d958a7ce32 + - buf.build/unionai/protoc-gen-swagger + - buf.build/grpc-ecosystem/grpc-gateway diff --git a/docs/api/flyteidl/docs/admin/admin.rst b/docs/api/flyteidl/docs/admin/admin.rst new file mode 100644 index 0000000000..832f656c16 --- /dev/null +++ 
b/docs/api/flyteidl/docs/admin/admin.rst @@ -0,0 +1,4623 @@ +###################### +Protocol Documentation +###################### + + + + +.. _ref_flyteidl/admin/cluster_assignment.proto: + +flyteidl/admin/cluster_assignment.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.ClusterAssignment: + +ClusterAssignment +------------------------------------------------------------------ + +Encapsulates specifications for routing an execution onto a specific cluster. + + + +.. csv-table:: ClusterAssignment type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "cluster_pool_name", ":ref:`ref_string`", "", "" + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/common.proto: + +flyteidl/admin/common.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.Annotations: + +Annotations +------------------------------------------------------------------ + +Annotation values to be applied to an execution resource. +In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined +to specify how to merge annotations defined at registration and execution time. + + + +.. csv-table:: Annotations type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "values", ":ref:`ref_flyteidl.admin.Annotations.ValuesEntry`", "repeated", "Map of custom annotations to be applied to the execution resource." + + + + + + + +.. _ref_flyteidl.admin.Annotations.ValuesEntry: + +Annotations.ValuesEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: Annotations.ValuesEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + + +.. 
_ref_flyteidl.admin.AuthRole: + +AuthRole +------------------------------------------------------------------ + +Defines permissions associated with executions created by this launch plan spec. +Use either of these roles when they have permissions required by your workflow execution. +Deprecated. + + + +.. csv-table:: AuthRole type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "assumable_iam_role", ":ref:`ref_string`", "", "Defines an optional iam role which will be used for tasks run in executions created with this launch plan." + "kubernetes_service_account", ":ref:`ref_string`", "", "Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan." + + + + + + + +.. _ref_flyteidl.admin.EmailNotification: + +EmailNotification +------------------------------------------------------------------ + +Defines an email notification specification. + + + +.. csv-table:: EmailNotification type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "recipients_email", ":ref:`ref_string`", "repeated", "The list of email addresses recipients for this notification. +required" + + + + + + + +.. _ref_flyteidl.admin.Labels: + +Labels +------------------------------------------------------------------ + +Label values to be applied to an execution resource. +In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined +to specify how to merge labels defined at registration and execution time. + + + +.. csv-table:: Labels type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "values", ":ref:`ref_flyteidl.admin.Labels.ValuesEntry`", "repeated", "Map of custom labels to be applied to the execution resource." + + + + + + + +.. _ref_flyteidl.admin.Labels.ValuesEntry: + +Labels.ValuesEntry +------------------------------------------------------------------ + + + + + +.. 
csv-table:: Labels.ValuesEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_flyteidl.admin.NamedEntity: + +NamedEntity +------------------------------------------------------------------ + +Encapsulates information common to a NamedEntity, a Flyte resource such as a task, +workflow or launch plan. A NamedEntity is exclusively identified by its resource type +and identifier. + + + +.. csv-table:: NamedEntity type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the named entity. One of Task, Workflow or LaunchPlan." + "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "" + "metadata", ":ref:`ref_flyteidl.admin.NamedEntityMetadata`", "", "Additional metadata around a named entity." + + + + + + + +.. _ref_flyteidl.admin.NamedEntityGetRequest: + +NamedEntityGetRequest +------------------------------------------------------------------ + +A request to retrieve the metadata associated with a NamedEntityIdentifier + + + +.. csv-table:: NamedEntityGetRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. +required" + "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "The identifier for the named entity for which to fetch metadata. +required" + + + + + + + +.. _ref_flyteidl.admin.NamedEntityIdentifier: + +NamedEntityIdentifier +------------------------------------------------------------------ + +Encapsulation of fields that identifies a Flyte resource. +A Flyte resource can be a task, workflow or launch plan. +A resource can internally have multiple versions and is uniquely identified +by project, domain, and name. + + + +.. 
csv-table:: NamedEntityIdentifier type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Name of the project the resource belongs to." + "domain", ":ref:`ref_string`", "", "Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project." + "name", ":ref:`ref_string`", "", "User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'" + + + + + + + +.. _ref_flyteidl.admin.NamedEntityIdentifierList: + +NamedEntityIdentifierList +------------------------------------------------------------------ + +Represents a list of NamedEntityIdentifiers. + + + +.. csv-table:: NamedEntityIdentifierList type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "entities", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "repeated", "A list of identifiers." + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." + + + + + + + +.. _ref_flyteidl.admin.NamedEntityIdentifierListRequest: + +NamedEntityIdentifierListRequest +------------------------------------------------------------------ + +Represents a request structure to list NamedEntityIdentifiers. + + + +.. csv-table:: NamedEntityIdentifierListRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Name of the project that contains the identifiers. +required" + "domain", ":ref:`ref_string`", "", "Name of the domain the identifiers belongs to within the project. +required" + "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. 
+required" + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" + "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Specifies how listed entities should be sorted in the response. +optional" + "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. +optional" + + + + + + + +.. _ref_flyteidl.admin.NamedEntityList: + +NamedEntityList +------------------------------------------------------------------ + +Represents a list of NamedEntityIdentifiers. + + + +.. csv-table:: NamedEntityList type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "entities", ":ref:`ref_flyteidl.admin.NamedEntity`", "repeated", "A list of NamedEntity objects" + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." + + + + + + + +.. _ref_flyteidl.admin.NamedEntityListRequest: + +NamedEntityListRequest +------------------------------------------------------------------ + +Represents a request structure to list NamedEntity objects + + + +.. csv-table:: NamedEntityListRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. +required" + "project", ":ref:`ref_string`", "", "Name of the project that contains the identifiers. +required" + "domain", ":ref:`ref_string`", "", "Name of the domain the identifiers belongs to within the project." + "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned." + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. 
+optional" + "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Specifies how listed entities should be sorted in the response. +optional" + "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. +optional" + + + + + + + +.. _ref_flyteidl.admin.NamedEntityMetadata: + +NamedEntityMetadata +------------------------------------------------------------------ + +Additional metadata around a named entity. + + + +.. csv-table:: NamedEntityMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "description", ":ref:`ref_string`", "", "Common description across all versions of the entity +optional" + "state", ":ref:`ref_flyteidl.admin.NamedEntityState`", "", "Shared state across all version of the entity At this point in time, only workflow entities can have their state archived." + + + + + + + +.. _ref_flyteidl.admin.NamedEntityUpdateRequest: + +NamedEntityUpdateRequest +------------------------------------------------------------------ + +Request to set the referenced named entity state to the configured value. + + + +.. csv-table:: NamedEntityUpdateRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the metadata to update +required" + "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "Identifier of the metadata to update +required" + "metadata", ":ref:`ref_flyteidl.admin.NamedEntityMetadata`", "", "Metadata object to set as the new value +required" + + + + + + + +.. _ref_flyteidl.admin.NamedEntityUpdateResponse: + +NamedEntityUpdateResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + + +.. _ref_flyteidl.admin.Notification: + +Notification +------------------------------------------------------------------ + +Represents a structure for notifications based on execution status. 
+The notification content is configured within flyte admin but can be templatized. +Future iterations could expose configuring notifications with custom content. + + + +.. csv-table:: Notification type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "phases", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "repeated", "A list of phases to which users can associate the notifications to. +required" + "email", ":ref:`ref_flyteidl.admin.EmailNotification`", "", "" + "pager_duty", ":ref:`ref_flyteidl.admin.PagerDutyNotification`", "", "" + "slack", ":ref:`ref_flyteidl.admin.SlackNotification`", "", "" + + + + + + + +.. _ref_flyteidl.admin.ObjectGetRequest: + +ObjectGetRequest +------------------------------------------------------------------ + +Shared request structure to fetch a single resource. +Resources include: Task, Workflow, LaunchPlan + + + +.. csv-table:: ObjectGetRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Indicates a unique version of resource. +required" + + + + + + + +.. _ref_flyteidl.admin.PagerDutyNotification: + +PagerDutyNotification +------------------------------------------------------------------ + +Defines a pager duty notification specification. + + + +.. csv-table:: PagerDutyNotification type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "recipients_email", ":ref:`ref_string`", "repeated", "Currently, PagerDuty notifications leverage email to trigger a notification. +required" + + + + + + + +.. _ref_flyteidl.admin.RawOutputDataConfig: + +RawOutputDataConfig +------------------------------------------------------------------ + +Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.). +See https://github.com/flyteorg/flyte/issues/211 for more background information. + + + +.. 
csv-table:: RawOutputDataConfig type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "output_location_prefix", ":ref:`ref_string`", "", "Prefix for where offloaded data from user workflows will be written e.g. s3://bucket/key or s3://bucket/" + + + + + + + +.. _ref_flyteidl.admin.ResourceListRequest: + +ResourceListRequest +------------------------------------------------------------------ + +Shared request structure to retrieve a list of resources. +Resources include: Task, Workflow, LaunchPlan + + + +.. csv-table:: ResourceListRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "id represents the unique identifier of the resource. +required" + "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional" + "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" + "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" + + + + + + + +.. _ref_flyteidl.admin.SlackNotification: + +SlackNotification +------------------------------------------------------------------ + +Defines a slack notification specification. + + + +.. csv-table:: SlackNotification type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "recipients_email", ":ref:`ref_string`", "repeated", "Currently, Slack notifications leverage email to trigger a notification. +required" + + + + + + + +.. _ref_flyteidl.admin.Sort: + +Sort +------------------------------------------------------------------ + +Specifies sort ordering in a list request. + + + +.. 
csv-table:: Sort type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "Indicates an attribute to sort the response values. +required" + "direction", ":ref:`ref_flyteidl.admin.Sort.Direction`", "", "Indicates the direction to apply sort key for response values. +optional" + + + + + + + +.. _ref_flyteidl.admin.UrlBlob: + +UrlBlob +------------------------------------------------------------------ + +Represents a string url and associated metadata used throughout the platform. + + + +.. csv-table:: UrlBlob type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "url", ":ref:`ref_string`", "", "Actual url value." + "bytes", ":ref:`ref_int64`", "", "Represents the size of the file accessible at the above url." + + + + + + +.. + end messages + + + +.. _ref_flyteidl.admin.NamedEntityState: + +NamedEntityState +------------------------------------------------------------------ + +The status of the named entity is used to control its visibility in the UI. + +.. csv-table:: Enum NamedEntityState values + :header: "Name", "Number", "Description" + :widths: auto + + "NAMED_ENTITY_ACTIVE", "0", "By default, all named entities are considered active and under development." + "NAMED_ENTITY_ARCHIVED", "1", "Archived named entities are no longer visible in the UI." + "SYSTEM_GENERATED", "2", "System generated entities that aren't explicitly created or managed by a user." + + + +.. _ref_flyteidl.admin.Sort.Direction: + +Sort.Direction +------------------------------------------------------------------ + + + +.. csv-table:: Enum Sort.Direction values + :header: "Name", "Number", "Description" + :widths: auto + + "DESCENDING", "0", "By default, fields are sorted in descending order." + "ASCENDING", "1", "" + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. 
_ref_flyteidl/admin/description_entity.proto: + +flyteidl/admin/description_entity.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.Description: + +Description +------------------------------------------------------------------ + +Full user description with formatting preserved. This can be rendered +by clients, such as the console or command line tools with in-tact +formatting. + + + +.. csv-table:: Description type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_string`", "", "long description - no more than 4KB" + "uri", ":ref:`ref_string`", "", "if the description sizes exceed some threshold we can offload the entire description proto altogether to an external data store, like S3 rather than store inline in the db" + "format", ":ref:`ref_flyteidl.admin.DescriptionFormat`", "", "Format of the long description" + "icon_link", ":ref:`ref_string`", "", "Optional link to an icon for the entity" + + + + + + + +.. _ref_flyteidl.admin.DescriptionEntity: + +DescriptionEntity +------------------------------------------------------------------ + +DescriptionEntity contains detailed description for the task/workflow. +Documentation could provide insight into the algorithms, business use case, etc. + + + +.. csv-table:: DescriptionEntity type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the description entity." + "short_description", ":ref:`ref_string`", "", "One-liner overview of the entity." + "long_description", ":ref:`ref_flyteidl.admin.Description`", "", "Full user description with formatting preserved." + "source_code", ":ref:`ref_flyteidl.admin.SourceCode`", "", "Optional link to source code used to define this entity." + "tags", ":ref:`ref_string`", "repeated", "User-specified tags. 
These are arbitrary and can be used for searching filtering and discovering tasks." + + + + + + + +.. _ref_flyteidl.admin.DescriptionEntityList: + +DescriptionEntityList +------------------------------------------------------------------ + +Represents a list of DescriptionEntities returned from the admin. +See :ref:`ref_flyteidl.admin.DescriptionEntity` for more details + + + +.. csv-table:: DescriptionEntityList type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "descriptionEntities", ":ref:`ref_flyteidl.admin.DescriptionEntity`", "repeated", "A list of DescriptionEntities returned based on the request." + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." + + + + + + + +.. _ref_flyteidl.admin.DescriptionEntityListRequest: + +DescriptionEntityListRequest +------------------------------------------------------------------ + +Represents a request structure to retrieve a list of DescriptionEntities. +See :ref:`ref_flyteidl.admin.DescriptionEntity` for more details + + + +.. csv-table:: DescriptionEntityListRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Identifies the specific type of resource that this identifier corresponds to." + "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "The identifier for the description entity. +required" + "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" + "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. 
More info on constructing filters : +optional" + "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering for returned list. +optional" + + + + + + + +.. _ref_flyteidl.admin.SourceCode: + +SourceCode +------------------------------------------------------------------ + +Link to source code used to define this entity + + + +.. csv-table:: SourceCode type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "link", ":ref:`ref_string`", "", "" + + + + + + +.. + end messages + + + +.. _ref_flyteidl.admin.DescriptionFormat: + +DescriptionFormat +------------------------------------------------------------------ + +The format of the long description + +.. csv-table:: Enum DescriptionFormat values + :header: "Name", "Number", "Description" + :widths: auto + + "DESCRIPTION_FORMAT_UNKNOWN", "0", "" + "DESCRIPTION_FORMAT_MARKDOWN", "1", "" + "DESCRIPTION_FORMAT_HTML", "2", "" + "DESCRIPTION_FORMAT_RST", "3", "python default documentation - comments is rst" + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/event.proto: + +flyteidl/admin/event.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.EventErrorAlreadyInTerminalState: + +EventErrorAlreadyInTerminalState +------------------------------------------------------------------ + +Indicates that a sent event was not used to update execution state due to +the referenced execution already being terminated (and therefore ineligible +for further state transitions). + + + +.. csv-table:: EventErrorAlreadyInTerminalState type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "current_phase", ":ref:`ref_string`", "", "+required" + + + + + + + +.. 
_ref_flyteidl.admin.EventErrorIncompatibleCluster: + +EventErrorIncompatibleCluster +------------------------------------------------------------------ + +Indicates an event was rejected because it came from a different cluster than +is on record as running the execution. + + + +.. csv-table:: EventErrorIncompatibleCluster type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "cluster", ":ref:`ref_string`", "", "The cluster which has been recorded as processing the execution. +required" + + + + + + + +.. _ref_flyteidl.admin.EventFailureReason: + +EventFailureReason +------------------------------------------------------------------ + +Indicates why a sent event was not used to update execution. + + + +.. csv-table:: EventFailureReason type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "already_in_terminal_state", ":ref:`ref_flyteidl.admin.EventErrorAlreadyInTerminalState`", "", "" + "incompatible_cluster", ":ref:`ref_flyteidl.admin.EventErrorIncompatibleCluster`", "", "" + + + + + + + +.. _ref_flyteidl.admin.NodeExecutionEventRequest: + +NodeExecutionEventRequest +------------------------------------------------------------------ + +Request to send a notification that a node execution event has occurred. + + + +.. csv-table:: NodeExecutionEventRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "request_id", ":ref:`ref_string`", "", "Unique ID for this request that can be traced between services" + "event", ":ref:`ref_flyteidl.event.NodeExecutionEvent`", "", "Details about the event that occurred." + + + + + + + +.. _ref_flyteidl.admin.NodeExecutionEventResponse: + +NodeExecutionEventResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + + +.. 
_ref_flyteidl.admin.TaskExecutionEventRequest: + +TaskExecutionEventRequest +------------------------------------------------------------------ + +Request to send a notification that a task execution event has occurred. + + + +.. csv-table:: TaskExecutionEventRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "request_id", ":ref:`ref_string`", "", "Unique ID for this request that can be traced between services" + "event", ":ref:`ref_flyteidl.event.TaskExecutionEvent`", "", "Details about the event that occurred." + + + + + + + +.. _ref_flyteidl.admin.TaskExecutionEventResponse: + +TaskExecutionEventResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + + +.. _ref_flyteidl.admin.WorkflowExecutionEventRequest: + +WorkflowExecutionEventRequest +------------------------------------------------------------------ + +Request to send a notification that a workflow execution event has occurred. + + + +.. csv-table:: WorkflowExecutionEventRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "request_id", ":ref:`ref_string`", "", "Unique ID for this request that can be traced between services" + "event", ":ref:`ref_flyteidl.event.WorkflowExecutionEvent`", "", "Details about the event that occurred." + + + + + + + +.. _ref_flyteidl.admin.WorkflowExecutionEventResponse: + +WorkflowExecutionEventResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/execution.proto: + +flyteidl/admin/execution.proto +================================================================== + + + + + +.. 
_ref_flyteidl.admin.AbortMetadata: + +AbortMetadata +------------------------------------------------------------------ + +Specifies metadata around an aborted workflow execution. + + + +.. csv-table:: AbortMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "cause", ":ref:`ref_string`", "", "In the case of a user-specified abort, this will pass along the user-supplied cause." + "principal", ":ref:`ref_string`", "", "Identifies the entity (if any) responsible for terminating the execution" + + + + + + + +.. _ref_flyteidl.admin.Execution: + +Execution +------------------------------------------------------------------ + +A workflow execution represents an instantiated workflow, including all inputs and additional +metadata as well as computed results included state, outputs, and duration-based attributes. +Used as a response object used in Get and List execution requests. + + + +.. csv-table:: Execution type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Unique identifier of the workflow execution." + "spec", ":ref:`ref_flyteidl.admin.ExecutionSpec`", "", "User-provided configuration and inputs for launching the execution." + "closure", ":ref:`ref_flyteidl.admin.ExecutionClosure`", "", "Execution results." + + + + + + + +.. _ref_flyteidl.admin.ExecutionClosure: + +ExecutionClosure +------------------------------------------------------------------ + +Encapsulates the results of the Execution + + + +.. csv-table:: ExecutionClosure type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "outputs", ":ref:`ref_flyteidl.admin.LiteralMapBlob`", "", "**Deprecated.** Output URI in the case of a successful execution. DEPRECATED. Use GetExecutionData to fetch output data instead." + "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information in the case of a failed execution." 
+ "abort_cause", ":ref:`ref_string`", "", "**Deprecated.** In the case of a user-specified abort, this will pass along the user-supplied cause." + "abort_metadata", ":ref:`ref_flyteidl.admin.AbortMetadata`", "", "In the case of a user-specified abort, this will pass along the user and their supplied cause." + "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Raw output data produced by this execution. DEPRECATED. Use GetExecutionData to fetch output data instead." + "computed_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Inputs computed and passed for execution. computed_inputs depends on inputs in ExecutionSpec, fixed and default inputs in launch plan" + "phase", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "", "Most recent recorded phase for the execution." + "started_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Reported time at which the execution began running." + "duration", ":ref:`ref_google.protobuf.Duration`", "", "The amount of time the execution spent running." + "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Reported time at which the execution was created." + "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Reported time at which the execution was last updated." + "notifications", ":ref:`ref_flyteidl.admin.Notification`", "repeated", "The notification settings to use after merging the CreateExecutionRequest and the launch plan notification settings. An execution launched with notifications will always prefer that definition to notifications defined statically in a launch plan." + "workflow_id", ":ref:`ref_flyteidl.core.Identifier`", "", "Identifies the workflow definition for this execution." + "state_change_details", ":ref:`ref_flyteidl.admin.ExecutionStateChangeDetails`", "", "Provides the details of the last stage change" + + + + + + + +.. 
_ref_flyteidl.admin.ExecutionCreateRequest: + +ExecutionCreateRequest +------------------------------------------------------------------ + +Request to launch an execution with the given project, domain and optionally-assigned name. + + + +.. csv-table:: ExecutionCreateRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Name of the project the execution belongs to. +required" + "domain", ":ref:`ref_string`", "", "Name of the domain the execution belongs to. A domain can be considered as a subset within a specific project. +required" + "name", ":ref:`ref_string`", "", "User provided value for the resource. If none is provided the system will generate a unique string. +optional" + "spec", ":ref:`ref_flyteidl.admin.ExecutionSpec`", "", "Additional fields necessary to launch the execution. +optional" + "inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "The inputs required to start the execution. All required inputs must be included in this map. If not required and not provided, defaults apply. +optional" + + + + + + + +.. _ref_flyteidl.admin.ExecutionCreateResponse: + +ExecutionCreateResponse +------------------------------------------------------------------ + +The unique identifier for a successfully created execution. +If the name was *not* specified in the create request, this identifier will include a generated name. + + + +.. csv-table:: ExecutionCreateResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "" + + + + + + + +.. _ref_flyteidl.admin.ExecutionList: + +ExecutionList +------------------------------------------------------------------ + +Used as a response for request to list executions. +See :ref:`ref_flyteidl.admin.Execution` for more details + + + +.. 
csv-table:: ExecutionList type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "executions", ":ref:`ref_flyteidl.admin.Execution`", "repeated", "" + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." + + + + + + + +.. _ref_flyteidl.admin.ExecutionMetadata: + +ExecutionMetadata +------------------------------------------------------------------ + +Represents attributes about an execution which are not required to launch the execution but are useful to record. +These attributes are assigned at launch time and do not change. + + + +.. csv-table:: ExecutionMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "mode", ":ref:`ref_flyteidl.admin.ExecutionMetadata.ExecutionMode`", "", "" + "principal", ":ref:`ref_string`", "", "Identifier of the entity that triggered this execution. For systems using back-end authentication any value set here will be discarded in favor of the authenticated user context." + "nesting", ":ref:`ref_uint32`", "", "Indicates the nestedness of this execution. If a user launches a workflow execution, the default nesting is 0. If this execution further launches a workflow (child workflow), the nesting level is incremented by 0 => 1 Generally, if workflow at nesting level k launches a workflow then the child workflow will have nesting = k + 1." + "scheduled_at", ":ref:`ref_google.protobuf.Timestamp`", "", "For scheduled executions, the requested time for execution for this specific schedule invocation." + "parent_node_execution", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Which subworkflow node (if any) launched this execution" + "reference_execution", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Optional, a reference workflow execution related to this execution. 
In the case of a relaunch, this references the original workflow execution." + "system_metadata", ":ref:`ref_flyteidl.admin.SystemMetadata`", "", "Optional, platform-specific metadata about the execution. In this the future this may be gated behind an ACL or some sort of authorization." + + + + + + + +.. _ref_flyteidl.admin.ExecutionRecoverRequest: + +ExecutionRecoverRequest +------------------------------------------------------------------ + +Request to recover the referenced execution. + + + +.. csv-table:: ExecutionRecoverRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifier of the workflow execution to recover." + "name", ":ref:`ref_string`", "", "User provided value for the recovered execution. If none is provided the system will generate a unique string. +optional" + "metadata", ":ref:`ref_flyteidl.admin.ExecutionMetadata`", "", "Additional metadata which will be used to overwrite any metadata in the reference execution when triggering a recovery execution." + + + + + + + +.. _ref_flyteidl.admin.ExecutionRelaunchRequest: + +ExecutionRelaunchRequest +------------------------------------------------------------------ + +Request to relaunch the referenced execution. + + + +.. csv-table:: ExecutionRelaunchRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifier of the workflow execution to relaunch. +required" + "name", ":ref:`ref_string`", "", "User provided value for the relaunched execution. If none is provided the system will generate a unique string. +optional" + "overwrite_cache", ":ref:`ref_bool`", "", "Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. 
If enabled, all calculations are performed even if cached results would be available, overwriting the stored data once execution finishes successfully." + + + + + + + +.. _ref_flyteidl.admin.ExecutionSpec: + +ExecutionSpec +------------------------------------------------------------------ + +An ExecutionSpec encompasses all data used to launch this execution. The Spec does not change over the lifetime +of an execution as it progresses across phase changes. + + + +.. csv-table:: ExecutionSpec type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "launch_plan", ":ref:`ref_flyteidl.core.Identifier`", "", "Launch plan to be executed" + "inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Input values to be passed for the execution" + "metadata", ":ref:`ref_flyteidl.admin.ExecutionMetadata`", "", "Metadata for the execution" + "notifications", ":ref:`ref_flyteidl.admin.NotificationList`", "", "List of notifications based on Execution status transitions When this list is not empty it is used rather than any notifications defined in the referenced launch plan. When this list is empty, the notifications defined for the launch plan will be applied." + "disable_all", ":ref:`ref_bool`", "", "This should be set to true if all notifications are intended to be disabled for this execution." + "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Labels to apply to the execution resource." + "annotations", ":ref:`ref_flyteidl.admin.Annotations`", "", "Annotations to apply to the execution resource." + "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "Optional: security context override to apply this execution." + "auth_role", ":ref:`ref_flyteidl.admin.AuthRole`", "", "**Deprecated.** Optional: auth override to apply this execution." + "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "Indicates the runtime priority of the execution." 
+ "max_parallelism", ":ref:`ref_int32`", "", "Controls the maximum number of task nodes that can be run in parallel for the entire workflow. This is useful to achieve fairness. Note: MapTasks are regarded as one unit, and parallelism/concurrency of MapTasks is independent from this." + "raw_output_data_config", ":ref:`ref_flyteidl.admin.RawOutputDataConfig`", "", "User setting to configure where to store offloaded data (i.e. Blobs, structured datasets, query data, etc.). This should be a prefix like s3://my-bucket/my-data" + "cluster_assignment", ":ref:`ref_flyteidl.admin.ClusterAssignment`", "", "Controls how to select an available cluster on which this execution should run." + "interruptible", ":ref:`ref_google.protobuf.BoolValue`", "", "Allows for the interruptible flag of a workflow to be overwritten for a single execution. Omitting this field uses the workflow's value as a default. As we need to distinguish between the field not being provided and its default value false, we have to use a wrapper around the bool field." + "overwrite_cache", ":ref:`ref_bool`", "", "Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. If enabled, all calculations are performed even if cached results would be available, overwriting the stored data once execution finishes successfully." + + + + + + + +.. _ref_flyteidl.admin.ExecutionStateChangeDetails: + +ExecutionStateChangeDetails +------------------------------------------------------------------ + + + + + +.. csv-table:: ExecutionStateChangeDetails type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "state", ":ref:`ref_flyteidl.admin.ExecutionState`", "", "The state of the execution is used to control its visibility in the UI/CLI." + "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the state changed." 
+ "principal", ":ref:`ref_string`", "", "Identifies the entity (if any) responsible for causing the state change of the execution" + + + + + + + +.. _ref_flyteidl.admin.ExecutionTerminateRequest: + +ExecutionTerminateRequest +------------------------------------------------------------------ + +Request to terminate an in-progress execution. This action is irreversible. +If an execution is already terminated, this request will simply be a no-op. +This request will fail if it references a non-existent execution. +If the request succeeds the phase "ABORTED" will be recorded for the termination +with the optional cause added to the output_result. + + + +.. csv-table:: ExecutionTerminateRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Uniquely identifies the individual workflow execution to be terminated." + "cause", ":ref:`ref_string`", "", "Optional reason for aborting." + + + + + + + +.. _ref_flyteidl.admin.ExecutionTerminateResponse: + +ExecutionTerminateResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + + +.. _ref_flyteidl.admin.ExecutionUpdateRequest: + +ExecutionUpdateRequest +------------------------------------------------------------------ + + + + + +.. csv-table:: ExecutionUpdateRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifier of the execution to update" + "state", ":ref:`ref_flyteidl.admin.ExecutionState`", "", "State to set as the new value active/archive" + + + + + + + +.. _ref_flyteidl.admin.ExecutionUpdateResponse: + +ExecutionUpdateResponse +------------------------------------------------------------------ + + + + + + + + + + +.. 
_ref_flyteidl.admin.LiteralMapBlob: + +LiteralMapBlob +------------------------------------------------------------------ + +Input/output data can be represented by actual values or a link to where values are stored + + + +.. csv-table:: LiteralMapBlob type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "values", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Data in LiteralMap format" + "uri", ":ref:`ref_string`", "", "In the event that the map is too large, we return a uri to the data" + + + + + + + +.. _ref_flyteidl.admin.NotificationList: + +NotificationList +------------------------------------------------------------------ + + + + + +.. csv-table:: NotificationList type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "notifications", ":ref:`ref_flyteidl.admin.Notification`", "repeated", "" + + + + + + + +.. _ref_flyteidl.admin.SystemMetadata: + +SystemMetadata +------------------------------------------------------------------ + +Represents system, rather than user-facing, metadata about an execution. + + + +.. csv-table:: SystemMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "execution_cluster", ":ref:`ref_string`", "", "Which execution cluster this execution ran on." + + + + + + + +.. _ref_flyteidl.admin.WorkflowExecutionGetDataRequest: + +WorkflowExecutionGetDataRequest +------------------------------------------------------------------ + +Request structure to fetch inputs, output and other data produced by an execution. +By default this data is not returned inline in :ref:`ref_flyteidl.admin.WorkflowExecutionGetRequest` + + + +.. csv-table:: WorkflowExecutionGetDataRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "The identifier of the execution for which to fetch inputs and outputs." + + + + + + + +.. 
_ref_flyteidl.admin.WorkflowExecutionGetDataResponse: + +WorkflowExecutionGetDataResponse +------------------------------------------------------------------ + +Response structure for WorkflowExecutionGetDataRequest which contains inputs and outputs for an execution. + + + +.. csv-table:: WorkflowExecutionGetDataResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "outputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of execution outputs. Deprecated: Please use full_outputs instead." + "inputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of execution inputs. Deprecated: Please use full_inputs instead." + "full_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_inputs will only be populated if they are under a configured size threshold." + "full_outputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_outputs will only be populated if they are under a configured size threshold." + + + + + + + +.. _ref_flyteidl.admin.WorkflowExecutionGetRequest: + +WorkflowExecutionGetRequest +------------------------------------------------------------------ + +A message used to fetch a single workflow execution entity. +See :ref:`ref_flyteidl.admin.Execution` for more details + + + +.. csv-table:: WorkflowExecutionGetRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Uniquely identifies an individual workflow execution." + + + + + + +.. + end messages + + + +.. _ref_flyteidl.admin.ExecutionMetadata.ExecutionMode: + +ExecutionMetadata.ExecutionMode +------------------------------------------------------------------ + +The method by which this execution was launched. + +.. 
csv-table:: Enum ExecutionMetadata.ExecutionMode values + :header: "Name", "Number", "Description" + :widths: auto + + "MANUAL", "0", "The default execution mode, MANUAL implies that an execution was launched by an individual." + "SCHEDULED", "1", "A schedule triggered this execution launch." + "SYSTEM", "2", "A system process was responsible for launching this execution rather than an individual." + "RELAUNCH", "3", "This execution was launched with identical inputs as a previous execution." + "CHILD_WORKFLOW", "4", "This execution was triggered by another execution." + "RECOVERED", "5", "This execution was recovered from another execution." + + + +.. _ref_flyteidl.admin.ExecutionState: + +ExecutionState +------------------------------------------------------------------ + +The state of the execution is used to control its visibility in the UI/CLI. + +.. csv-table:: Enum ExecutionState values + :header: "Name", "Number", "Description" + :widths: auto + + "EXECUTION_ACTIVE", "0", "By default, all executions are considered active." + "EXECUTION_ARCHIVED", "1", "Archived executions are no longer visible in the UI." + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/launch_plan.proto: + +flyteidl/admin/launch_plan.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.ActiveLaunchPlanListRequest: + +ActiveLaunchPlanListRequest +------------------------------------------------------------------ + +Represents a request structure to list active launch plans within a project/domain. +See :ref:`ref_flyteidl.admin.LaunchPlan` for more details + + + +.. csv-table:: ActiveLaunchPlanListRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Name of the project that contains the identifiers. +required." + "domain", ":ref:`ref_string`", "", "Name of the domain the identifiers belongs to within the project. 
+required." + "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required." + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" + "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" + + + + + + + +.. _ref_flyteidl.admin.ActiveLaunchPlanRequest: + +ActiveLaunchPlanRequest +------------------------------------------------------------------ + +Represents a request struct for finding an active launch plan for a given NamedEntityIdentifier +See :ref:`ref_flyteidl.admin.LaunchPlan` for more details + + + +.. csv-table:: ActiveLaunchPlanRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "+required." + + + + + + + +.. _ref_flyteidl.admin.Auth: + +Auth +------------------------------------------------------------------ + +Defines permissions associated with executions created by this launch plan spec. +Use either of these roles when they have permissions required by your workflow execution. +Deprecated. + + + +.. csv-table:: Auth type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "assumable_iam_role", ":ref:`ref_string`", "", "Defines an optional iam role which will be used for tasks run in executions created with this launch plan." + "kubernetes_service_account", ":ref:`ref_string`", "", "Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan." + + + + + + + +.. _ref_flyteidl.admin.LaunchPlan: + +LaunchPlan +------------------------------------------------------------------ + +A LaunchPlan provides the capability to templatize workflow executions. +Launch plans simplify associating one or more schedules, inputs and notifications with your workflows. 
+Launch plans can be shared and used to trigger executions with predefined inputs even when a workflow +definition doesn't necessarily have a default value for said input. + + + +.. csv-table:: LaunchPlan type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Uniquely identifies a launch plan entity." + "spec", ":ref:`ref_flyteidl.admin.LaunchPlanSpec`", "", "User-provided launch plan details, including reference workflow, inputs and other metadata." + "closure", ":ref:`ref_flyteidl.admin.LaunchPlanClosure`", "", "Values computed by the flyte platform after launch plan registration." + + + + + + + +.. _ref_flyteidl.admin.LaunchPlanClosure: + +LaunchPlanClosure +------------------------------------------------------------------ + +Values computed by the flyte platform after launch plan registration. +These include expected_inputs required to be present in a CreateExecutionRequest +to launch the reference workflow as well as timestamp values associated with the launch plan. + + + +.. csv-table:: LaunchPlanClosure type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "state", ":ref:`ref_flyteidl.admin.LaunchPlanState`", "", "Indicate the Launch plan state." + "expected_inputs", ":ref:`ref_flyteidl.core.ParameterMap`", "", "Indicates the set of inputs expected when creating an execution with the Launch plan" + "expected_outputs", ":ref:`ref_flyteidl.core.VariableMap`", "", "Indicates the set of outputs expected to be produced by creating an execution with the Launch plan" + "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the launch plan was created." + "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the launch plan was last updated." + + + + + + + +.. 
_ref_flyteidl.admin.LaunchPlanCreateRequest: + +LaunchPlanCreateRequest +------------------------------------------------------------------ + +Request to register a launch plan. The included LaunchPlanSpec may have a complete or incomplete set of inputs required +to launch a workflow execution. By default all launch plans are registered in state INACTIVE. If you wish to +set the state to ACTIVE, you must submit a LaunchPlanUpdateRequest, after you have successfully created a launch plan. + + + +.. csv-table:: LaunchPlanCreateRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Uniquely identifies a launch plan entity." + "spec", ":ref:`ref_flyteidl.admin.LaunchPlanSpec`", "", "User-provided launch plan details, including reference workflow, inputs and other metadata." + + + + + + + +.. _ref_flyteidl.admin.LaunchPlanCreateResponse: + +LaunchPlanCreateResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + + +.. _ref_flyteidl.admin.LaunchPlanList: + +LaunchPlanList +------------------------------------------------------------------ + +Response object for list launch plan requests. +See :ref:`ref_flyteidl.admin.LaunchPlan` for more details + + + +.. csv-table:: LaunchPlanList type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "launch_plans", ":ref:`ref_flyteidl.admin.LaunchPlan`", "repeated", "" + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." + + + + + + + +.. 
_ref_flyteidl.admin.LaunchPlanMetadata: + +LaunchPlanMetadata +------------------------------------------------------------------ + +Additional launch plan attributes included in the LaunchPlanSpec not strictly required to launch +the reference workflow. + + + +.. csv-table:: LaunchPlanMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "schedule", ":ref:`ref_flyteidl.admin.Schedule`", "", "Schedule to execute the Launch Plan" + "notifications", ":ref:`ref_flyteidl.admin.Notification`", "repeated", "List of notifications based on Execution status transitions" + + + + + + + +.. _ref_flyteidl.admin.LaunchPlanSpec: + +LaunchPlanSpec +------------------------------------------------------------------ + +User-provided launch plan definition and configuration values. + + + +.. csv-table:: LaunchPlanSpec type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "workflow_id", ":ref:`ref_flyteidl.core.Identifier`", "", "Reference to the Workflow template that the launch plan references" + "entity_metadata", ":ref:`ref_flyteidl.admin.LaunchPlanMetadata`", "", "Metadata for the Launch Plan" + "default_inputs", ":ref:`ref_flyteidl.core.ParameterMap`", "", "Input values to be passed for the execution. These can be overridden when an execution is created with this launch plan." + "fixed_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Fixed, non-overridable inputs for the Launch Plan. These can not be overridden when an execution is created with this launch plan." + "role", ":ref:`ref_string`", "", "**Deprecated.** String to indicate the role to use to execute the workflow underneath" + "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Custom labels to be applied to the execution resource." + "annotations", ":ref:`ref_flyteidl.admin.Annotations`", "", "Custom annotations to be applied to the execution resource." 
+ "auth", ":ref:`ref_flyteidl.admin.Auth`", "", "**Deprecated.** Indicates the permission associated with workflow executions triggered with this launch plan." + "auth_role", ":ref:`ref_flyteidl.admin.AuthRole`", "", "**Deprecated.** " + "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "Indicates security context for permissions triggered with this launch plan" + "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "Indicates the runtime priority of the execution." + "raw_output_data_config", ":ref:`ref_flyteidl.admin.RawOutputDataConfig`", "", "Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.)." + "max_parallelism", ":ref:`ref_int32`", "", "Controls the maximum number of tasknodes that can be run in parallel for the entire workflow. This is useful to achieve fairness. Note: MapTasks are regarded as one unit, and parallelism/concurrency of MapTasks is independent from this." + "interruptible", ":ref:`ref_google.protobuf.BoolValue`", "", "Allows for the interruptible flag of a workflow to be overwritten for a single execution. Omitting this field uses the workflow's value as a default. As we need to distinguish between the field not being provided and its default value false, we have to use a wrapper around the bool field." + "overwrite_cache", ":ref:`ref_bool`", "", "Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. If enabled, all calculations are performed even if cached results would be available, overwriting the stored data once execution finishes successfully." + + + + + + + +.. _ref_flyteidl.admin.LaunchPlanUpdateRequest: + +LaunchPlanUpdateRequest +------------------------------------------------------------------ + +Request to set the referenced launch plan state to the configured value. +See :ref:`ref_flyteidl.admin.LaunchPlan` for more details + + + +.. 
csv-table:: LaunchPlanUpdateRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Identifier of launch plan for which to change state. +required." + "state", ":ref:`ref_flyteidl.admin.LaunchPlanState`", "", "Desired state to apply to the launch plan. +required." + + + + + + + +.. _ref_flyteidl.admin.LaunchPlanUpdateResponse: + +LaunchPlanUpdateResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + +.. + end messages + + + +.. _ref_flyteidl.admin.LaunchPlanState: + +LaunchPlanState +------------------------------------------------------------------ + +By default any launch plan regardless of state can be used to launch a workflow execution. +However, at most one version of a launch plan +(e.g. a NamedEntityIdentifier set of shared project, domain and name values) can be +active at a time in regards to *schedules*. That is, at most one schedule in a NamedEntityIdentifier +group will be observed and trigger executions at a defined cadence. + +.. csv-table:: Enum LaunchPlanState values + :header: "Name", "Number", "Description" + :widths: auto + + "INACTIVE", "0", "" + "ACTIVE", "1", "" + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/matchable_resource.proto: + +flyteidl/admin/matchable_resource.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.ClusterResourceAttributes: + +ClusterResourceAttributes +------------------------------------------------------------------ + + + + + +.. csv-table:: ClusterResourceAttributes type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "attributes", ":ref:`ref_flyteidl.admin.ClusterResourceAttributes.AttributesEntry`", "repeated", "Custom resource attributes which will be applied in cluster resource creation (e.g. 
quotas). Map keys are the *case-sensitive* names of variables in templatized resource files. Map values should be the custom values which get substituted during resource creation." + + + + + + + +.. _ref_flyteidl.admin.ClusterResourceAttributes.AttributesEntry: + +ClusterResourceAttributes.AttributesEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: ClusterResourceAttributes.AttributesEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_flyteidl.admin.ExecutionClusterLabel: + +ExecutionClusterLabel +------------------------------------------------------------------ + + + + + +.. csv-table:: ExecutionClusterLabel type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_string`", "", "Label value to determine where the execution will be run" + + + + + + + +.. _ref_flyteidl.admin.ExecutionQueueAttributes: + +ExecutionQueueAttributes +------------------------------------------------------------------ + + + + + +.. csv-table:: ExecutionQueueAttributes type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "tags", ":ref:`ref_string`", "repeated", "Tags used for assigning execution queues for tasks defined within this project." + + + + + + + +.. _ref_flyteidl.admin.ListMatchableAttributesRequest: + +ListMatchableAttributesRequest +------------------------------------------------------------------ + +Request all matching resource attributes for a resource type. +See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details + + + +.. csv-table:: ListMatchableAttributesRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "+required" + + + + + + + +.. 
_ref_flyteidl.admin.ListMatchableAttributesResponse: + +ListMatchableAttributesResponse +------------------------------------------------------------------ + +Response for a request for all matching resource attributes for a resource type. +See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details + + + +.. csv-table:: ListMatchableAttributesResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "configurations", ":ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`", "repeated", "" + + + + + + + +.. _ref_flyteidl.admin.MatchableAttributesConfiguration: + +MatchableAttributesConfiguration +------------------------------------------------------------------ + +Represents a custom set of attributes applied for either a domain; a domain and project; or +domain, project and workflow name. +These are used to override system level defaults for kubernetes cluster resource management, +default execution values, and more all across different levels of specificity. + + + +.. csv-table:: MatchableAttributesConfiguration type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", "" + "domain", ":ref:`ref_string`", "", "" + "project", ":ref:`ref_string`", "", "" + "workflow", ":ref:`ref_string`", "", "" + "launch_plan", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_flyteidl.admin.MatchingAttributes: + +MatchingAttributes +------------------------------------------------------------------ + +Generic container for encapsulating all types of the above attributes messages. + + + +.. 
csv-table:: MatchingAttributes type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "task_resource_attributes", ":ref:`ref_flyteidl.admin.TaskResourceAttributes`", "", "" + "cluster_resource_attributes", ":ref:`ref_flyteidl.admin.ClusterResourceAttributes`", "", "" + "execution_queue_attributes", ":ref:`ref_flyteidl.admin.ExecutionQueueAttributes`", "", "" + "execution_cluster_label", ":ref:`ref_flyteidl.admin.ExecutionClusterLabel`", "", "" + "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "" + "plugin_overrides", ":ref:`ref_flyteidl.admin.PluginOverrides`", "", "" + "workflow_execution_config", ":ref:`ref_flyteidl.admin.WorkflowExecutionConfig`", "", "" + "cluster_assignment", ":ref:`ref_flyteidl.admin.ClusterAssignment`", "", "" + + + + + + + +.. _ref_flyteidl.admin.PluginOverride: + +PluginOverride +------------------------------------------------------------------ + +This MatchableAttribute configures selecting alternate plugin implementations for a given task type. +In addition to an override implementation a selection of fallbacks can be provided or other modes +for handling cases where the desired plugin override is not enabled in a given Flyte deployment. + + + +.. csv-table:: PluginOverride type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "task_type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier." + "plugin_id", ":ref:`ref_string`", "repeated", "A set of plugin ids which should handle tasks of this type instead of the default registered plugin. The list will be tried in order until a plugin is found with that id." + "missing_plugin_behavior", ":ref:`ref_flyteidl.admin.PluginOverride.MissingPluginBehavior`", "", "Defines the behavior when no plugin from the plugin_id list is found." + + + + + + + +.. _ref_flyteidl.admin.PluginOverrides: + +PluginOverrides +------------------------------------------------------------------ + + + + + +.. 
csv-table:: PluginOverrides type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "overrides", ":ref:`ref_flyteidl.admin.PluginOverride`", "repeated", "" + + + + + + + +.. _ref_flyteidl.admin.TaskResourceAttributes: + +TaskResourceAttributes +------------------------------------------------------------------ + +Defines task resource defaults and limits that will be applied at task registration. + + + +.. csv-table:: TaskResourceAttributes type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "defaults", ":ref:`ref_flyteidl.admin.TaskResourceSpec`", "", "" + "limits", ":ref:`ref_flyteidl.admin.TaskResourceSpec`", "", "" + + + + + + + +.. _ref_flyteidl.admin.TaskResourceSpec: + +TaskResourceSpec +------------------------------------------------------------------ + +Defines a set of overridable task resource attributes set during task registration. + + + +.. csv-table:: TaskResourceSpec type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "cpu", ":ref:`ref_string`", "", "" + "gpu", ":ref:`ref_string`", "", "" + "memory", ":ref:`ref_string`", "", "" + "storage", ":ref:`ref_string`", "", "" + "ephemeral_storage", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_flyteidl.admin.WorkflowExecutionConfig: + +WorkflowExecutionConfig +------------------------------------------------------------------ + +Adds defaults for customizable workflow-execution specifications and overrides. + + + +.. csv-table:: WorkflowExecutionConfig type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "max_parallelism", ":ref:`ref_int32`", "", "Can be used to control the number of parallel nodes to run within the workflow. This is useful to achieve fairness." + "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "Indicates security context permissions for executions triggered with this matchable attribute." 
+ "raw_output_data_config", ":ref:`ref_flyteidl.admin.RawOutputDataConfig`", "", "Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.)." + "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Custom labels to be applied to a triggered execution resource." + "annotations", ":ref:`ref_flyteidl.admin.Annotations`", "", "Custom annotations to be applied to a triggered execution resource." + "interruptible", ":ref:`ref_google.protobuf.BoolValue`", "", "Allows for the interruptible flag of a workflow to be overwritten for a single execution. Omitting this field uses the workflow's value as a default. As we need to distinguish between the field not being provided and its default value false, we have to use a wrapper around the bool field." + "overwrite_cache", ":ref:`ref_bool`", "", "Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. If enabled, all calculations are performed even if cached results would be available, overwriting the stored data once execution finishes successfully." + + + + + + +.. + end messages + + + +.. _ref_flyteidl.admin.MatchableResource: + +MatchableResource +------------------------------------------------------------------ + +Defines a resource that can be configured by customizable Project-, ProjectDomain- or WorkflowAttributes +based on matching tags. + +.. csv-table:: Enum MatchableResource values + :header: "Name", "Number", "Description" + :widths: auto + + "TASK_RESOURCE", "0", "Applies to customizable task resource requests and limits." + "CLUSTER_RESOURCE", "1", "Applies to configuring templated kubernetes cluster resources." + "EXECUTION_QUEUE", "2", "Configures task and dynamic task execution queue assignment." + "EXECUTION_CLUSTER_LABEL", "3", "Configures the K8s cluster label to be used for execution to be run" + "QUALITY_OF_SERVICE_SPECIFICATION", "4", "Configures default quality of service when undefined in an execution spec." 
+ "PLUGIN_OVERRIDE", "5", "Selects configurable plugin implementation behavior for a given task type." + "WORKFLOW_EXECUTION_CONFIG", "6", "Adds defaults for customizable workflow-execution specifications and overrides." + "CLUSTER_ASSIGNMENT", "7", "Controls how to select an available cluster on which this execution should run." + + + +.. _ref_flyteidl.admin.PluginOverride.MissingPluginBehavior: + +PluginOverride.MissingPluginBehavior +------------------------------------------------------------------ + + + +.. csv-table:: Enum PluginOverride.MissingPluginBehavior values + :header: "Name", "Number", "Description" + :widths: auto + + "FAIL", "0", "By default, if this plugin is not enabled for a Flyte deployment then execution will fail." + "USE_DEFAULT", "1", "Uses the system-configured default implementation." + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/node_execution.proto: + +flyteidl/admin/node_execution.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.DynamicWorkflowNodeMetadata: + +DynamicWorkflowNodeMetadata +------------------------------------------------------------------ + +For dynamic workflow nodes we capture information about the dynamic workflow definition that gets generated. + + + +.. csv-table:: DynamicWorkflowNodeMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow." + "compiled_workflow", ":ref:`ref_flyteidl.core.CompiledWorkflowClosure`", "", "Represents the compiled representation of the embedded dynamic workflow." + + + + + + + +.. _ref_flyteidl.admin.NodeExecution: + +NodeExecution +------------------------------------------------------------------ + +Encapsulates all details for a single node execution entity. +A node represents a component in the overall workflow graph. 
A node can launch a task, multiple tasks, an entire nested +sub-workflow, or even a separate child-workflow execution. +The same task can be called repeatedly in a single workflow but each node is unique. + + + +.. csv-table:: NodeExecution type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Uniquely identifies an individual node execution." + "input_uri", ":ref:`ref_string`", "", "Path to remote data store where input blob is stored." + "closure", ":ref:`ref_flyteidl.admin.NodeExecutionClosure`", "", "Computed results associated with this node execution." + "metadata", ":ref:`ref_flyteidl.admin.NodeExecutionMetaData`", "", "Metadata for Node Execution" + + + + + + + +.. _ref_flyteidl.admin.NodeExecutionClosure: + +NodeExecutionClosure +------------------------------------------------------------------ + +Container for node execution details and results. + + + +.. csv-table:: NodeExecutionClosure type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "output_uri", ":ref:`ref_string`", "", "**Deprecated.** Links to a remotely stored, serialized core.LiteralMap of node execution outputs. DEPRECATED. Use GetNodeExecutionData to fetch output data instead." + "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the Node" + "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Raw output data produced by this node execution. DEPRECATED. Use GetNodeExecutionData to fetch output data instead." + "phase", ":ref:`ref_flyteidl.core.NodeExecution.Phase`", "", "The last recorded phase for this node execution." + "started_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the node execution began running." + "duration", ":ref:`ref_google.protobuf.Duration`", "", "The amount of time the node execution spent running." 
+ "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the node execution was created." + "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the node execution was last updated." + "workflow_node_metadata", ":ref:`ref_flyteidl.admin.WorkflowNodeMetadata`", "", "" + "task_node_metadata", ":ref:`ref_flyteidl.admin.TaskNodeMetadata`", "", "" + "deck_uri", ":ref:`ref_string`", "", "String location uniquely identifying where the deck HTML file is. NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar)" + + + + + + + +.. _ref_flyteidl.admin.NodeExecutionForTaskListRequest: + +NodeExecutionForTaskListRequest +------------------------------------------------------------------ + +Represents a request structure to retrieve a list of node execution entities launched by a specific task. +This can arise when a task yields a subworkflow. + + + +.. csv-table:: NodeExecutionForTaskListRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "task_execution_id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Indicates the task execution to filter by. +required" + "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" + "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" + "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" + + + + + + + +.. _ref_flyteidl.admin.NodeExecutionGetDataRequest: + +NodeExecutionGetDataRequest +------------------------------------------------------------------ + +Request structure to fetch inputs and output for a node execution. 
+By default, these are not returned in :ref:`ref_flyteidl.admin.NodeExecutionGetRequest` + + + +.. csv-table:: NodeExecutionGetDataRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "The identifier of the node execution for which to fetch inputs and outputs." + + + + + + + +.. _ref_flyteidl.admin.NodeExecutionGetDataResponse: + +NodeExecutionGetDataResponse +------------------------------------------------------------------ + +Response structure for NodeExecutionGetDataRequest which contains inputs and outputs for a node execution. + + + +.. csv-table:: NodeExecutionGetDataResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "inputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of node execution inputs. Deprecated: Please use full_inputs instead." + "outputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of node execution outputs. Deprecated: Please use full_outputs instead." + "full_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_inputs will only be populated if they are under a configured size threshold." + "full_outputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_outputs will only be populated if they are under a configured size threshold." + "dynamic_workflow", ":ref:`ref_flyteidl.admin.DynamicWorkflowNodeMetadata`", "", "Optional Workflow closure for a dynamically generated workflow, in the case this node yields a dynamic workflow we return its structure here." + + + + + + + +.. _ref_flyteidl.admin.NodeExecutionGetRequest: + +NodeExecutionGetRequest +------------------------------------------------------------------ + +A message used to fetch a single node execution entity. +See :ref:`ref_flyteidl.admin.NodeExecution` for more details + + + +.. 
csv-table:: NodeExecutionGetRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Uniquely identifies an individual node execution. +required" + + + + + + + +.. _ref_flyteidl.admin.NodeExecutionList: + +NodeExecutionList +------------------------------------------------------------------ + +Request structure to retrieve a list of node execution entities. +See :ref:`ref_flyteidl.admin.NodeExecution` for more details + + + +.. csv-table:: NodeExecutionList type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "node_executions", ":ref:`ref_flyteidl.admin.NodeExecution`", "repeated", "" + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." + + + + + + + +.. _ref_flyteidl.admin.NodeExecutionListRequest: + +NodeExecutionListRequest +------------------------------------------------------------------ + +Represents a request structure to retrieve a list of node execution entities. +See :ref:`ref_flyteidl.admin.NodeExecution` for more details + + + +.. csv-table:: NodeExecutionListRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "workflow_execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Indicates the workflow execution to filter by. +required" + "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" + "token", ":ref:`ref_string`", "", "" + "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" + "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" + "unique_parent_id", ":ref:`ref_string`", "", "Unique identifier of the parent node in the execution +optional" + + + + + + + +.. 
_ref_flyteidl.admin.NodeExecutionMetaData: + +NodeExecutionMetaData +------------------------------------------------------------------ + +Represents additional attributes related to a Node Execution + + + +.. csv-table:: NodeExecutionMetaData type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "retry_group", ":ref:`ref_string`", "", "Node executions are grouped depending on retries of the parent. Retry group is unique within the context of a parent node." + "is_parent_node", ":ref:`ref_bool`", "", "Boolean flag indicating if the node has child nodes under it. This can be true when a node contains a dynamic workflow which then produces child nodes." + "spec_node_id", ":ref:`ref_string`", "", "Node id of the node in the original workflow. This maps to value of WorkflowTemplate.nodes[X].id" + "is_dynamic", ":ref:`ref_bool`", "", "Boolean flag indicating if the node contains a dynamic workflow which then produces child nodes. This is to distinguish between subworkflows and dynamic workflows which can both have is_parent_node as true." + + + + + + + +.. _ref_flyteidl.admin.TaskNodeMetadata: + +TaskNodeMetadata +------------------------------------------------------------------ + +Metadata for the case in which the node is a TaskNode + + + +.. csv-table:: TaskNodeMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "cache_status", ":ref:`ref_flyteidl.core.CatalogCacheStatus`", "", "Captures the status of caching for this execution." + "catalog_key", ":ref:`ref_flyteidl.core.CatalogMetadata`", "", "This structure carries the catalog artifact information" + "checkpoint_uri", ":ref:`ref_string`", "", "The latest checkpoint location" + + + + + + + +.. _ref_flyteidl.admin.WorkflowNodeMetadata: + +WorkflowNodeMetadata +------------------------------------------------------------------ + +Metadata for a WorkflowNode + + + +.. 
csv-table:: WorkflowNodeMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "executionId", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "The identifier for a workflow execution launched by a node." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/notification.proto: + +flyteidl/admin/notification.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.EmailMessage: + +EmailMessage +------------------------------------------------------------------ + +Represents the Email object that is sent to a publisher/subscriber +to forward the notification. +Note: This is internal to Admin and doesn't need to be exposed to other components. + + + +.. csv-table:: EmailMessage type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "recipients_email", ":ref:`ref_string`", "repeated", "The list of email addresses to receive an email with the content populated in the other fields. Currently, each email recipient will receive its own email. This populates the TO field." + "sender_email", ":ref:`ref_string`", "", "The email of the sender. This populates the FROM field." + "subject_line", ":ref:`ref_string`", "", "The content of the subject line. This populates the SUBJECT field." + "body", ":ref:`ref_string`", "", "The content of the email body. This populates the BODY field." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/project.proto: + +flyteidl/admin/project.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.Domain: + +Domain +------------------------------------------------------------------ + +Namespace within a project commonly used to differentiate between different service instances. +e.g. 
"production", "development", etc. + + + +.. csv-table:: Domain type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_string`", "", "Globally unique domain name." + "name", ":ref:`ref_string`", "", "Display name." + + + + + + + +.. _ref_flyteidl.admin.Project: + +Project +------------------------------------------------------------------ + +Top-level namespace used to classify different entities like workflows and executions. + + + +.. csv-table:: Project type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_string`", "", "Globally unique project name." + "name", ":ref:`ref_string`", "", "Display name." + "domains", ":ref:`ref_flyteidl.admin.Domain`", "repeated", "" + "description", ":ref:`ref_string`", "", "" + "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Leverage Labels from flyteidl.admin.common.proto to tag projects with ownership information." + "state", ":ref:`ref_flyteidl.admin.Project.ProjectState`", "", "" + + + + + + + +.. _ref_flyteidl.admin.ProjectListRequest: + +ProjectListRequest +------------------------------------------------------------------ + +Request to retrieve a list of projects matching specified filters. +See :ref:`ref_flyteidl.admin.Project` for more details + + + +.. csv-table:: ProjectListRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "limit", ":ref:`ref_uint32`", "", "Indicates the number of projects to be returned. +required" + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional" + "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" + "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" + + + + + + + +.. 
_ref_flyteidl.admin.ProjectRegisterRequest: + +ProjectRegisterRequest +------------------------------------------------------------------ + +Adds a new user-project within the Flyte deployment. +See :ref:`ref_flyteidl.admin.Project` for more details + + + +.. csv-table:: ProjectRegisterRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_flyteidl.admin.Project`", "", "+required" + + + + + + + +.. _ref_flyteidl.admin.ProjectRegisterResponse: + +ProjectRegisterResponse +------------------------------------------------------------------ + +Purposefully empty, may be updated in the future. + + + + + + + + +.. _ref_flyteidl.admin.ProjectUpdateResponse: + +ProjectUpdateResponse +------------------------------------------------------------------ + +Purposefully empty, may be updated in the future. + + + + + + + + +.. _ref_flyteidl.admin.Projects: + +Projects +------------------------------------------------------------------ + +Represents a list of projects. +See :ref:`ref_flyteidl.admin.Project` for more details + + + +.. csv-table:: Projects type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "projects", ":ref:`ref_flyteidl.admin.Project`", "repeated", "" + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." + + + + + + +.. + end messages + + + +.. _ref_flyteidl.admin.Project.ProjectState: + +Project.ProjectState +------------------------------------------------------------------ + +The state of the project is used to control its visibility in the UI and validity. + +.. csv-table:: Enum Project.ProjectState values + :header: "Name", "Number", "Description" + :widths: auto + + "ACTIVE", "0", "By default, all projects are considered active." 
+ "ARCHIVED", "1", "Archived projects are no longer visible in the UI and no longer valid." + "SYSTEM_GENERATED", "2", "System generated projects that aren't explicitly created or managed by a user." + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/project_attributes.proto: + +flyteidl/admin/project_attributes.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.ProjectAttributes: + +ProjectAttributes +------------------------------------------------------------------ + +Defines a set of custom matching attributes at the project level. +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. csv-table:: ProjectAttributes type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Unique project id for which this set of attributes will be applied." + "matching_attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", "" + + + + + + + +.. _ref_flyteidl.admin.ProjectAttributesDeleteRequest: + +ProjectAttributesDeleteRequest +------------------------------------------------------------------ + +Request to delete a set matchable project level attribute override. +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. csv-table:: ProjectAttributesDeleteRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" + "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to delete. +required" + + + + + + + +.. 
_ref_flyteidl.admin.ProjectAttributesDeleteResponse: + +ProjectAttributesDeleteResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + + +.. _ref_flyteidl.admin.ProjectAttributesGetRequest: + +ProjectAttributesGetRequest +------------------------------------------------------------------ + +Request to get an individual project level attribute override. +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. csv-table:: ProjectAttributesGetRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" + "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to return. +required" + + + + + + + +.. _ref_flyteidl.admin.ProjectAttributesGetResponse: + +ProjectAttributesGetResponse +------------------------------------------------------------------ + +Response to get an individual project level attribute override. +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. csv-table:: ProjectAttributesGetResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "attributes", ":ref:`ref_flyteidl.admin.ProjectAttributes`", "", "" + + + + + + + +.. _ref_flyteidl.admin.ProjectAttributesUpdateRequest: + +ProjectAttributesUpdateRequest +------------------------------------------------------------------ + +Sets custom attributes for a project +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. 
csv-table:: ProjectAttributesUpdateRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "attributes", ":ref:`ref_flyteidl.admin.ProjectAttributes`", "", "+required" + + + + + + + +.. _ref_flyteidl.admin.ProjectAttributesUpdateResponse: + +ProjectAttributesUpdateResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/project_domain_attributes.proto: + +flyteidl/admin/project_domain_attributes.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.ProjectDomainAttributes: + +ProjectDomainAttributes +------------------------------------------------------------------ + +Defines a set of custom matching attributes which defines resource defaults for a project and domain. +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. csv-table:: ProjectDomainAttributes type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Unique project id for which this set of attributes will be applied." + "domain", ":ref:`ref_string`", "", "Unique domain id for which this set of attributes will be applied." + "matching_attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", "" + + + + + + + +.. _ref_flyteidl.admin.ProjectDomainAttributesDeleteRequest: + +ProjectDomainAttributesDeleteRequest +------------------------------------------------------------------ + +Request to delete a set matchable project domain attribute override. +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. 
csv-table:: ProjectDomainAttributesDeleteRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" + "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required" + "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to delete. +required" + + + + + + + +.. _ref_flyteidl.admin.ProjectDomainAttributesDeleteResponse: + +ProjectDomainAttributesDeleteResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + + +.. _ref_flyteidl.admin.ProjectDomainAttributesGetRequest: + +ProjectDomainAttributesGetRequest +------------------------------------------------------------------ + +Request to get an individual project domain attribute override. +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. csv-table:: ProjectDomainAttributesGetRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" + "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required" + "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to return. +required" + + + + + + + +.. _ref_flyteidl.admin.ProjectDomainAttributesGetResponse: + +ProjectDomainAttributesGetResponse +------------------------------------------------------------------ + +Response to get an individual project domain attribute override. +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. 
csv-table:: ProjectDomainAttributesGetResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "attributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributes`", "", "" + + + + + + + +.. _ref_flyteidl.admin.ProjectDomainAttributesUpdateRequest: + +ProjectDomainAttributesUpdateRequest +------------------------------------------------------------------ + +Sets custom attributes for a project-domain combination. +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. csv-table:: ProjectDomainAttributesUpdateRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "attributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributes`", "", "+required" + + + + + + + +.. _ref_flyteidl.admin.ProjectDomainAttributesUpdateResponse: + +ProjectDomainAttributesUpdateResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/schedule.proto: + +flyteidl/admin/schedule.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.CronSchedule: + +CronSchedule +------------------------------------------------------------------ + +Options for schedules to run according to a cron expression. + + + +.. 
csv-table:: CronSchedule type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "schedule", ":ref:`ref_string`", "", "Standard/default cron implementation as described by https://en.wikipedia.org/wiki/Cron#CRON_expression; Also supports nonstandard predefined scheduling definitions as described by https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html#CronExpressions except @reboot" + "offset", ":ref:`ref_string`", "", "ISO 8601 duration as described by https://en.wikipedia.org/wiki/ISO_8601#Durations" + + + + + + + +.. _ref_flyteidl.admin.FixedRate: + +FixedRate +------------------------------------------------------------------ + +Option for schedules run at a certain frequency e.g. every 2 minutes. + + + +.. csv-table:: FixedRate type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_uint32`", "", "" + "unit", ":ref:`ref_flyteidl.admin.FixedRateUnit`", "", "" + + + + + + + +.. _ref_flyteidl.admin.Schedule: + +Schedule +------------------------------------------------------------------ + +Defines complete set of information required to trigger an execution on a schedule. + + + +.. csv-table:: Schedule type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "cron_expression", ":ref:`ref_string`", "", "**Deprecated.** Uses AWS syntax: Minutes Hours Day-of-month Month Day-of-week Year e.g. for a schedule that runs every 15 minutes: 0/15 * * * ? *" + "rate", ":ref:`ref_flyteidl.admin.FixedRate`", "", "" + "cron_schedule", ":ref:`ref_flyteidl.admin.CronSchedule`", "", "" + "kickoff_time_input_arg", ":ref:`ref_string`", "", "Name of the input variable that the kickoff time will be supplied to when the workflow is kicked off." + + + + + + +.. + end messages + + + +.. _ref_flyteidl.admin.FixedRateUnit: + +FixedRateUnit +------------------------------------------------------------------ + +Represents a frequency at which to run a schedule. + +.. 
csv-table:: Enum FixedRateUnit values + :header: "Name", "Number", "Description" + :widths: auto + + "MINUTE", "0", "" + "HOUR", "1", "" + "DAY", "2", "" + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/signal.proto: + +flyteidl/admin/signal.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.Signal: + +Signal +------------------------------------------------------------------ + +Signal encapsulates a unique identifier, associated metadata, and a value for a single Flyte +signal. Signals may exist either without a set value (representing a signal request) or with a +populated value (indicating the signal has been given). + + + +.. csv-table:: Signal type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.SignalIdentifier`", "", "A unique identifier for the requested signal." + "type", ":ref:`ref_flyteidl.core.LiteralType`", "", "A type denoting the required value type for this signal." + "value", ":ref:`ref_flyteidl.core.Literal`", "", "The value of the signal. This is only available if the signal has been "set" and must match the defined type." + + + + + + + +.. _ref_flyteidl.admin.SignalGetOrCreateRequest: + +SignalGetOrCreateRequest +------------------------------------------------------------------ + +SignalGetOrCreateRequest represents a request structure to retrieve or create a signal. +See :ref:`ref_flyteidl.admin.Signal` for more details + + + +.. csv-table:: SignalGetOrCreateRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.SignalIdentifier`", "", "A unique identifier for the requested signal." + "type", ":ref:`ref_flyteidl.core.LiteralType`", "", "A type denoting the required value type for this signal." + + + + + + + +.. 
_ref_flyteidl.admin.SignalList: + +SignalList +------------------------------------------------------------------ + +SignalList represents a collection of signals along with the token of the last result. +See :ref:`ref_flyteidl.admin.Signal` for more details + + + +.. csv-table:: SignalList type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "signals", ":ref:`ref_flyteidl.admin.Signal`", "repeated", "A list of signals matching the input filters." + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." + + + + + + + +.. _ref_flyteidl.admin.SignalListRequest: + +SignalListRequest +------------------------------------------------------------------ + +SignalListRequest represents a request structure to retrieve a collection of signals. +See :ref:`ref_flyteidl.admin.Signal` for more details + + + +.. csv-table:: SignalListRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "workflow_execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Indicates the workflow execution to filter by. +required" + "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" + "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. +optional" + "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" + + + + + + + +.. _ref_flyteidl.admin.SignalSetRequest: + +SignalSetRequest +------------------------------------------------------------------ + +SignalSetRequest represents a request structure to set the value on a signal. Setting a signal +effectively satisfies the signal condition within a Flyte workflow. 
+See :ref:`ref_flyteidl.admin.Signal` for more details + + + +.. csv-table:: SignalSetRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.SignalIdentifier`", "", "A unique identifier for the requested signal." + "value", ":ref:`ref_flyteidl.core.Literal`", "", "The value of this signal, must match the defining signal type." + + + + + + + +.. _ref_flyteidl.admin.SignalSetResponse: + +SignalSetResponse +------------------------------------------------------------------ + +SignalSetResponse represents a response structure if signal setting succeeds. + +Purposefully empty, may be populated in the future. + + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/task.proto: + +flyteidl/admin/task.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.Task: + +Task +------------------------------------------------------------------ + +Flyte workflows are composed of many ordered tasks. That is small, reusable, self-contained logical blocks +arranged to process workflow inputs and produce a deterministic set of outputs. +Tasks can come in many varieties tuned for specialized behavior. + + + +.. csv-table:: Task type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the task." + "closure", ":ref:`ref_flyteidl.admin.TaskClosure`", "", "closure encapsulates all the fields that maps to a compiled version of the task." + "short_description", ":ref:`ref_string`", "", "One-liner overview of the entity." + + + + + + + +.. _ref_flyteidl.admin.TaskClosure: + +TaskClosure +------------------------------------------------------------------ + +Compute task attributes which include values derived from the TaskSpec, as well as plugin-specific data +and task metadata. 
+ + + +.. csv-table:: TaskClosure type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "compiled_task", ":ref:`ref_flyteidl.core.CompiledTask`", "", "Represents the compiled representation of the task from the specification provided." + "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task was created." + + + + + + + +.. _ref_flyteidl.admin.TaskCreateRequest: + +TaskCreateRequest +------------------------------------------------------------------ + +Represents a request structure to create a revision of a task. +See :ref:`ref_flyteidl.admin.Task` for more details + + + +.. csv-table:: TaskCreateRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the task. +required" + "spec", ":ref:`ref_flyteidl.admin.TaskSpec`", "", "Represents the specification for task. +required" + + + + + + + +.. _ref_flyteidl.admin.TaskCreateResponse: + +TaskCreateResponse +------------------------------------------------------------------ + +Represents a response structure if task creation succeeds. + +Purposefully empty, may be populated in the future. + + + + + + + + +.. _ref_flyteidl.admin.TaskList: + +TaskList +------------------------------------------------------------------ + +Represents a list of tasks returned from the admin. +See :ref:`ref_flyteidl.admin.Task` for more details + + + +.. csv-table:: TaskList type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "tasks", ":ref:`ref_flyteidl.admin.Task`", "repeated", "A list of tasks returned based on the request." + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." + + + + + + + +.. 
_ref_flyteidl.admin.TaskSpec: + +TaskSpec +------------------------------------------------------------------ + +Represents a structure that encapsulates the user-configured specification of the task. + + + +.. csv-table:: TaskSpec type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "template", ":ref:`ref_flyteidl.core.TaskTemplate`", "", "Template of the task that encapsulates all the metadata of the task." + "description", ":ref:`ref_flyteidl.admin.DescriptionEntity`", "", "Represents the specification for description entity." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/task_execution.proto: + +flyteidl/admin/task_execution.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.TaskExecution: + +TaskExecution +------------------------------------------------------------------ + +Encapsulates all details for a single task execution entity. +A task execution represents an instantiated task, including all inputs and additional +metadata as well as computed results included state, outputs, and duration-based attributes. + + + +.. csv-table:: TaskExecution type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Unique identifier for the task execution." + "input_uri", ":ref:`ref_string`", "", "Path to remote data store where input blob is stored." + "closure", ":ref:`ref_flyteidl.admin.TaskExecutionClosure`", "", "Task execution details and results." + "is_parent", ":ref:`ref_bool`", "", "Whether this task spawned nodes." + + + + + + + +.. _ref_flyteidl.admin.TaskExecutionClosure: + +TaskExecutionClosure +------------------------------------------------------------------ + +Container for task execution details and results. + + + +.. 
csv-table:: TaskExecutionClosure type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "output_uri", ":ref:`ref_string`", "", "**Deprecated.** Path to remote data store where output blob is stored if the execution succeeded (and produced outputs). DEPRECATED. Use GetTaskExecutionData to fetch output data instead." + "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the task execution. Populated if the execution failed." + "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Raw output data produced by this task execution. DEPRECATED. Use GetTaskExecutionData to fetch output data instead." + "phase", ":ref:`ref_flyteidl.core.TaskExecution.Phase`", "", "The last recorded phase for this task execution." + "logs", ":ref:`ref_flyteidl.core.TaskLog`", "repeated", "Detailed log information output by the task execution." + "started_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task execution began running." + "duration", ":ref:`ref_google.protobuf.Duration`", "", "The amount of time the task execution spent running." + "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task execution was created." + "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task execution was last updated." + "custom_info", ":ref:`ref_google.protobuf.Struct`", "", "Custom data specific to the task plugin." + "reason", ":ref:`ref_string`", "", "If there is an explanation for the most recent phase transition, the reason will capture it." + "task_type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier." + "metadata", ":ref:`ref_flyteidl.event.TaskExecutionMetadata`", "", "Metadata around how a task was executed." + "event_version", ":ref:`ref_int32`", "", "The event version is used to indicate versioned changes in how data is maintained using this proto message. 
For example, event_version > 0 means that map task logs use the TaskExecutionMetadata ExternalResourceInfo fields for each subtask rather than the TaskLog in this message." + + + + + + + +.. _ref_flyteidl.admin.TaskExecutionGetDataRequest: + +TaskExecutionGetDataRequest +------------------------------------------------------------------ + +Request structure to fetch inputs and output for a task execution. +By default this data is not returned inline in :ref:`ref_flyteidl.admin.TaskExecutionGetRequest` + + + +.. csv-table:: TaskExecutionGetDataRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "The identifier of the task execution for which to fetch inputs and outputs. +required" + + + + + + + +.. _ref_flyteidl.admin.TaskExecutionGetDataResponse: + +TaskExecutionGetDataResponse +------------------------------------------------------------------ + +Response structure for TaskExecutionGetDataRequest which contains inputs and outputs for a task execution. + + + +.. csv-table:: TaskExecutionGetDataResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "inputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of task execution inputs. Deprecated: Please use full_inputs instead." + "outputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of task execution outputs. Deprecated: Please use full_outputs instead." + "full_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_inputs will only be populated if they are under a configured size threshold." + "full_outputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_outputs will only be populated if they are under a configured size threshold." + + + + + + + +.. 
_ref_flyteidl.admin.TaskExecutionGetRequest: + +TaskExecutionGetRequest +------------------------------------------------------------------ + +A message used to fetch a single task execution entity. +See :ref:`ref_flyteidl.admin.TaskExecution` for more details + + + +.. csv-table:: TaskExecutionGetRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Unique identifier for the task execution. +required" + + + + + + + +.. _ref_flyteidl.admin.TaskExecutionList: + +TaskExecutionList +------------------------------------------------------------------ + +Response structure for a query to list of task execution entities. +See :ref:`ref_flyteidl.admin.TaskExecution` for more details + + + +.. csv-table:: TaskExecutionList type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "task_executions", ":ref:`ref_flyteidl.admin.TaskExecution`", "repeated", "" + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." + + + + + + + +.. _ref_flyteidl.admin.TaskExecutionListRequest: + +TaskExecutionListRequest +------------------------------------------------------------------ + +Represents a request structure to retrieve a list of task execution entities yielded by a specific node execution. +See :ref:`ref_flyteidl.admin.TaskExecution` for more details + + + +.. csv-table:: TaskExecutionListRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Indicates the node execution to filter by. +required" + "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. 
+required" + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" + "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" + "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering for returned list. +optional" + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/version.proto: + +flyteidl/admin/version.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.GetVersionRequest: + +GetVersionRequest +------------------------------------------------------------------ + +Empty request for GetVersion + + + + + + + + +.. _ref_flyteidl.admin.GetVersionResponse: + +GetVersionResponse +------------------------------------------------------------------ + +Response for the GetVersion API + + + +.. csv-table:: GetVersionResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "control_plane_version", ":ref:`ref_flyteidl.admin.Version`", "", "The control plane version information. FlyteAdmin and related components form the control plane of Flyte" + + + + + + + +.. _ref_flyteidl.admin.Version: + +Version +------------------------------------------------------------------ + +Provides Version information for a component + + + +.. csv-table:: Version type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "Build", ":ref:`ref_string`", "", "Specifies the GIT sha of the build" + "Version", ":ref:`ref_string`", "", "Version for the build, should follow a semver" + "BuildTime", ":ref:`ref_string`", "", "Build timestamp" + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. 
_ref_flyteidl/admin/workflow.proto: + +flyteidl/admin/workflow.proto +================================================================== + + + + + +.. _ref_flyteidl.admin.CreateWorkflowFailureReason: + +CreateWorkflowFailureReason +------------------------------------------------------------------ + +When a CreateWorkflowRequest fails due to matching id + + + +.. csv-table:: CreateWorkflowFailureReason type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "exists_different_structure", ":ref:`ref_flyteidl.admin.WorkflowErrorExistsDifferentStructure`", "", "" + "exists_identical_structure", ":ref:`ref_flyteidl.admin.WorkflowErrorExistsIdenticalStructure`", "", "" + + + + + + + +.. _ref_flyteidl.admin.Workflow: + +Workflow +------------------------------------------------------------------ + +Represents the workflow structure stored in the Admin +A workflow is created by ordering tasks and associating outputs to inputs +in order to produce a directed-acyclic execution graph. + + + +.. csv-table:: Workflow type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow." + "closure", ":ref:`ref_flyteidl.admin.WorkflowClosure`", "", "closure encapsulates all the fields that maps to a compiled version of the workflow." + "short_description", ":ref:`ref_string`", "", "One-liner overview of the entity." + + + + + + + +.. _ref_flyteidl.admin.WorkflowClosure: + +WorkflowClosure +------------------------------------------------------------------ + +A container holding the compiled workflow produced from the WorkflowSpec and additional metadata. + + + +.. csv-table:: WorkflowClosure type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "compiled_workflow", ":ref:`ref_flyteidl.core.CompiledWorkflowClosure`", "", "Represents the compiled representation of the workflow from the specification provided." 
+ "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the workflow was created." + + + + + + + +.. _ref_flyteidl.admin.WorkflowCreateRequest: + +WorkflowCreateRequest +------------------------------------------------------------------ + +Represents a request structure to create a revision of a workflow. +See :ref:`ref_flyteidl.admin.Workflow` for more details + + + +.. csv-table:: WorkflowCreateRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow. +required" + "spec", ":ref:`ref_flyteidl.admin.WorkflowSpec`", "", "Represents the specification for workflow. +required" + + + + + + + +.. _ref_flyteidl.admin.WorkflowCreateResponse: + +WorkflowCreateResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + + +.. _ref_flyteidl.admin.WorkflowErrorExistsDifferentStructure: + +WorkflowErrorExistsDifferentStructure +------------------------------------------------------------------ + +The workflow id is already used and the structure is different + + + +.. csv-table:: WorkflowErrorExistsDifferentStructure type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "" + + + + + + + +.. _ref_flyteidl.admin.WorkflowErrorExistsIdenticalStructure: + +WorkflowErrorExistsIdenticalStructure +------------------------------------------------------------------ + +The workflow id is already used with an identical structure + + + +.. csv-table:: WorkflowErrorExistsIdenticalStructure type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "" + + + + + + + +.. 
_ref_flyteidl.admin.WorkflowList: + +WorkflowList +------------------------------------------------------------------ + +Represents a list of workflows returned from the admin. +See :ref:`ref_flyteidl.admin.Workflow` for more details + + + +.. csv-table:: WorkflowList type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "workflows", ":ref:`ref_flyteidl.admin.Workflow`", "repeated", "A list of workflows returned based on the request." + "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." + + + + + + + +.. _ref_flyteidl.admin.WorkflowSpec: + +WorkflowSpec +------------------------------------------------------------------ + +Represents a structure that encapsulates the specification of the workflow. + + + +.. csv-table:: WorkflowSpec type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "template", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "", "Template of the task that encapsulates all the metadata of the workflow." + "sub_workflows", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "repeated", "Workflows that are embedded into other workflows need to be passed alongside the parent workflow to the propeller compiler (since the compiler doesn't have any knowledge of other workflows - ie, it doesn't reach out to Admin to see other registered workflows). In fact, subworkflows do not even need to be registered." + "description", ":ref:`ref_flyteidl.admin.DescriptionEntity`", "", "Represents the specification for description entity." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/admin/workflow_attributes.proto: + +flyteidl/admin/workflow_attributes.proto +================================================================== + + + + + +.. 
_ref_flyteidl.admin.WorkflowAttributes: + +WorkflowAttributes +------------------------------------------------------------------ + +Defines a set of custom matching attributes which defines resource defaults for a project, domain and workflow. +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. csv-table:: WorkflowAttributes type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Unique project id for which this set of attributes will be applied." + "domain", ":ref:`ref_string`", "", "Unique domain id for which this set of attributes will be applied." + "workflow", ":ref:`ref_string`", "", "Workflow name for which this set of attributes will be applied." + "matching_attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", "" + + + + + + + +.. _ref_flyteidl.admin.WorkflowAttributesDeleteRequest: + +WorkflowAttributesDeleteRequest +------------------------------------------------------------------ + +Request to delete a set matchable workflow attribute override. +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. csv-table:: WorkflowAttributesDeleteRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" + "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required" + "workflow", ":ref:`ref_string`", "", "Workflow name which this set of attributes references. +required" + "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to delete. +required" + + + + + + + +.. 
_ref_flyteidl.admin.WorkflowAttributesDeleteResponse: + +WorkflowAttributesDeleteResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + + +.. _ref_flyteidl.admin.WorkflowAttributesGetRequest: + +WorkflowAttributesGetRequest +------------------------------------------------------------------ + +Request to get an individual workflow attribute override. +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. csv-table:: WorkflowAttributesGetRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" + "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required" + "workflow", ":ref:`ref_string`", "", "Workflow name which this set of attributes references. +required" + "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to return. +required" + + + + + + + +.. _ref_flyteidl.admin.WorkflowAttributesGetResponse: + +WorkflowAttributesGetResponse +------------------------------------------------------------------ + +Response to get an individual workflow attribute override. + + + +.. csv-table:: WorkflowAttributesGetResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "attributes", ":ref:`ref_flyteidl.admin.WorkflowAttributes`", "", "" + + + + + + + +.. _ref_flyteidl.admin.WorkflowAttributesUpdateRequest: + +WorkflowAttributesUpdateRequest +------------------------------------------------------------------ + +Sets custom attributes for a project, domain and workflow combination. +For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` + + + +.. 
csv-table:: WorkflowAttributesUpdateRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "attributes", ":ref:`ref_flyteidl.admin.WorkflowAttributes`", "", "" + + + + + + + +.. _ref_flyteidl.admin.WorkflowAttributesUpdateResponse: + +WorkflowAttributesUpdateResponse +------------------------------------------------------------------ + +Purposefully empty, may be populated in the future. + + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_google/protobuf/duration.proto: + +google/protobuf/duration.proto +================================================================== + + + + + +.. _ref_google.protobuf.Duration: + +Duration +------------------------------------------------------------------ + +A Duration represents a signed, fixed-length span of time represented +as a count of seconds and fractions of seconds at nanosecond +resolution. It is independent of any calendar and concepts like "day" +or "month". It is related to Timestamp in that the difference between +two Timestamp values is a Duration and it can be added or subtracted +from a Timestamp. Range is approximately +-10,000 years. + +# Examples + +Example 1: Compute Duration from two Timestamps in pseudo code. + + Timestamp start = ...; + Timestamp end = ...; + Duration duration = ...; + + duration.seconds = end.seconds - start.seconds; + duration.nanos = end.nanos - start.nanos; + + if (duration.seconds < 0 && duration.nanos > 0) { + duration.seconds += 1; + duration.nanos -= 1000000000; + } else if (duration.seconds > 0 && duration.nanos < 0) { + duration.seconds -= 1; + duration.nanos += 1000000000; + } + +Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. 
+ + Timestamp start = ...; + Duration duration = ...; + Timestamp end = ...; + + end.seconds = start.seconds + duration.seconds; + end.nanos = start.nanos + duration.nanos; + + if (end.nanos < 0) { + end.seconds -= 1; + end.nanos += 1000000000; + } else if (end.nanos >= 1000000000) { + end.seconds += 1; + end.nanos -= 1000000000; + } + +Example 3: Compute Duration from datetime.timedelta in Python. + + td = datetime.timedelta(days=3, minutes=10) + duration = Duration() + duration.FromTimedelta(td) + +# JSON Mapping + +In JSON format, the Duration type is encoded as a string rather than an +object, where the string ends in the suffix "s" (indicating seconds) and +is preceded by the number of seconds, with nanoseconds expressed as +fractional seconds. For example, 3 seconds with 0 nanoseconds should be +encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should +be expressed in JSON format as "3.000000001s", and 3 seconds and 1 +microsecond should be expressed in JSON format as "3.000001s". + + + +.. csv-table:: Duration type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years" + "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. 
_ref_google/protobuf/wrappers.proto: + +google/protobuf/wrappers.proto +================================================================== + + + + + +.. _ref_google.protobuf.BoolValue: + +BoolValue +------------------------------------------------------------------ + +Wrapper message for `bool`. + +The JSON representation for `BoolValue` is JSON `true` and `false`. + + + +.. csv-table:: BoolValue type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_bool`", "", "The bool value." + + + + + + + +.. _ref_google.protobuf.BytesValue: + +BytesValue +------------------------------------------------------------------ + +Wrapper message for `bytes`. + +The JSON representation for `BytesValue` is JSON string. + + + +.. csv-table:: BytesValue type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_bytes`", "", "The bytes value." + + + + + + + +.. _ref_google.protobuf.DoubleValue: + +DoubleValue +------------------------------------------------------------------ + +Wrapper message for `double`. + +The JSON representation for `DoubleValue` is JSON number. + + + +.. csv-table:: DoubleValue type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_double`", "", "The double value." + + + + + + + +.. _ref_google.protobuf.FloatValue: + +FloatValue +------------------------------------------------------------------ + +Wrapper message for `float`. + +The JSON representation for `FloatValue` is JSON number. + + + +.. csv-table:: FloatValue type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_float`", "", "The float value." + + + + + + + +.. _ref_google.protobuf.Int32Value: + +Int32Value +------------------------------------------------------------------ + +Wrapper message for `int32`. + +The JSON representation for `Int32Value` is JSON number. + + + +.. 
csv-table:: Int32Value type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_int32`", "", "The int32 value." + + + + + + + +.. _ref_google.protobuf.Int64Value: + +Int64Value +------------------------------------------------------------------ + +Wrapper message for `int64`. + +The JSON representation for `Int64Value` is JSON string. + + + +.. csv-table:: Int64Value type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_int64`", "", "The int64 value." + + + + + + + +.. _ref_google.protobuf.StringValue: + +StringValue +------------------------------------------------------------------ + +Wrapper message for `string`. + +The JSON representation for `StringValue` is JSON string. + + + +.. csv-table:: StringValue type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_string`", "", "The string value." + + + + + + + +.. _ref_google.protobuf.UInt32Value: + +UInt32Value +------------------------------------------------------------------ + +Wrapper message for `uint32`. + +The JSON representation for `UInt32Value` is JSON number. + + + +.. csv-table:: UInt32Value type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_uint32`", "", "The uint32 value." + + + + + + + +.. _ref_google.protobuf.UInt64Value: + +UInt64Value +------------------------------------------------------------------ + +Wrapper message for `uint64`. + +The JSON representation for `UInt64Value` is JSON string. + + + +.. csv-table:: UInt64Value type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_uint64`", "", "The uint64 value." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. 
+ end services + + diff --git a/docs/api/flyteidl/docs/admin/index.rst b/docs/api/flyteidl/docs/admin/index.rst new file mode 100644 index 0000000000..6518e82dbb --- /dev/null +++ b/docs/api/flyteidl/docs/admin/index.rst @@ -0,0 +1,13 @@ +Flyte Admin Service entities +============================ + +These are the control plane entities that can be used to communicate with the +FlyteAdmin service over gRPC or REST. The endpoint specification is defined in the +`Admin raw protos `__ + +.. toctree:: + :maxdepth: 1 + :caption: admin + :name: admintoc + + admin diff --git a/docs/api/flyteidl/docs/contributing.md b/docs/api/flyteidl/docs/contributing.md new file mode 100644 index 0000000000..67685f45b7 --- /dev/null +++ b/docs/api/flyteidl/docs/contributing.md @@ -0,0 +1,79 @@ +# Flyteidl + +This is one of the core repositories of Flyte. It contains the Specification of the Flyte Language using protobuf messages, the Backend API specification in gRPC, and Swagger REST. The repo contains the generated clients and protocol message structures in multiple languages. Along with the generated code, the repository also contains the Golang clients for Flyte's backend APIs (the services grouped under FlyteAdmin). + + +[![Slack](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://slack.flyte.org) + +* [flyte.org](https://flyte.org) +* [Flyte Docs](http://docs.flyte.org) +* [Flyteidl API reference documentation](https://docs.flyte.org/en/latest/reference_flyteidl.html) + +## Contributing to Flyteidl + +## Tooling for Flyteidl + +1. Run ``make download_tooling`` to install generator dependencies. + +```bash + make download_tooling +``` + +2. Ensure Docker is installed locally. +3. Run ``make generate`` to generate all the code, mock client, and docs for FlyteAdmin Service. + +```bash + make generate +``` + +4. 
To add new dependencies for documentation generation, modify ``doc-requirements.in`` and run + +```bash + make doc-requirements.txt +``` + +## Docs structure + +The index.rst files for protos are arranged in parallel under the ``docs`` folder. +All the proto definitions are within ``protos/flyteidl`` and their corresponding docs are in ``protos/docs``. + +``` +docs +├── admin +│   ├── admin.rst +│   └── index.rst +├── core +│   ├── core.rst +│   └── index.rst +├── datacatalog +│   ├── datacatalog.rst +│   └── index.rst +├── event +│   ├── event.rst +│   └── index.rst +├── plugins +│   ├── index.rst +│   └── plugins.rst +├── service +│   ├── index.rst +│   └── service.rst +``` + +Each module in protos has a module in docs with the same name. +For example: ``protos/flyteidl/core`` has a module ``protos/docs/core`` under the ``docs`` folder which has the corresponding index and documentation files. + + +## Generating Documentation + +* If a new module is to be introduced, follow the structure for core files in `generate_protos.sh` file which helps generate the core documentation from its proto files. +``` + core_proto_files=`ls protos/flyteidl/core/*.proto |xargs` + # Remove any currently generated file + ls -d protos/docs/core/* | grep -v index.rst | xargs rm + protoc --doc_out=protos/docs/core --doc_opt=restructuredtext,core.rst -I=protos `echo $core_proto_files` +``` + +* ``make generate`` generates the modified rst files. + +* ``make html`` generates the Sphinx documentation from the docs folder that uses the modified rst files. + diff --git a/docs/api/flyteidl/docs/core/core.rst b/docs/api/flyteidl/docs/core/core.rst new file mode 100644 index 0000000000..dd3cf71341 --- /dev/null +++ b/docs/api/flyteidl/docs/core/core.rst @@ -0,0 +1,3952 @@ +###################### +Protocol Documentation +###################### + + + + +.. 
_ref_flyteidl/core/catalog.proto: + +flyteidl/core/catalog.proto +================================================================== + + + + + +.. _ref_flyteidl.core.CatalogArtifactTag: + +CatalogArtifactTag +------------------------------------------------------------------ + + + + + +.. csv-table:: CatalogArtifactTag type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "artifact_id", ":ref:`ref_string`", "", "Artifact ID is generated name" + "name", ":ref:`ref_string`", "", "Flyte computes the tag automatically, as the hash of the values" + + + + + + + +.. _ref_flyteidl.core.CatalogMetadata: + +CatalogMetadata +------------------------------------------------------------------ + +Catalog artifact information with specific metadata + + + +.. csv-table:: CatalogMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "dataset_id", ":ref:`ref_flyteidl.core.Identifier`", "", "Dataset ID in the catalog" + "artifact_tag", ":ref:`ref_flyteidl.core.CatalogArtifactTag`", "", "Artifact tag in the catalog" + "source_task_execution", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Today we only support TaskExecutionIdentifier as a source, as catalog caching only works for task executions" + + + + + + + +.. _ref_flyteidl.core.CatalogReservation: + +CatalogReservation +------------------------------------------------------------------ + + + + + + + + + +.. + end messages + + + +.. _ref_flyteidl.core.CatalogCacheStatus: + +CatalogCacheStatus +------------------------------------------------------------------ + +Indicates the status of CatalogCaching. The reason why this is not embedded in TaskNodeMetadata is, that we may use for other types of nodes as well in the future + +.. 
csv-table:: Enum CatalogCacheStatus values + :header: "Name", "Number", "Description" + :widths: auto + + "CACHE_DISABLED", "0", "Used to indicate that caching was disabled" + "CACHE_MISS", "1", "Used to indicate that the cache lookup resulted in no matches" + "CACHE_HIT", "2", "used to indicate that the associated artifact was a result of a previous execution" + "CACHE_POPULATED", "3", "used to indicate that the resultant artifact was added to the cache" + "CACHE_LOOKUP_FAILURE", "4", "Used to indicate that cache lookup failed because of an error" + "CACHE_PUT_FAILURE", "5", "Used to indicate that cache put failed because of an error" + "CACHE_SKIPPED", "6", "Used to indicate the cache lookup was skipped" + + + +.. _ref_flyteidl.core.CatalogReservation.Status: + +CatalogReservation.Status +------------------------------------------------------------------ + +Indicates the status of a catalog reservation operation. + +.. csv-table:: Enum CatalogReservation.Status values + :header: "Name", "Number", "Description" + :widths: auto + + "RESERVATION_DISABLED", "0", "Used to indicate that reservations are disabled" + "RESERVATION_ACQUIRED", "1", "Used to indicate that a reservation was successfully acquired or extended" + "RESERVATION_EXISTS", "2", "Used to indicate that an active reservation currently exists" + "RESERVATION_RELEASED", "3", "Used to indicate that the reservation has been successfully released" + "RESERVATION_FAILURE", "4", "Used to indicate that a reservation operation resulted in failure" + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/core/compiler.proto: + +flyteidl/core/compiler.proto +================================================================== + + + + + +.. _ref_flyteidl.core.CompiledTask: + +CompiledTask +------------------------------------------------------------------ + +Output of the Compilation step. This object represents one Task. We store more metadata at this layer + + + +.. 
csv-table:: CompiledTask type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "template", ":ref:`ref_flyteidl.core.TaskTemplate`", "", "Completely contained TaskTemplate" + + + + + + + +.. _ref_flyteidl.core.CompiledWorkflow: + +CompiledWorkflow +------------------------------------------------------------------ + +Output of the compilation Step. This object represents one workflow. We store more metadata at this layer + + + +.. csv-table:: CompiledWorkflow type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "template", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "", "Completely contained Workflow Template" + "connections", ":ref:`ref_flyteidl.core.ConnectionSet`", "", "For internal use only! This field is used by the system and must not be filled in. Any values set will be ignored." + + + + + + + +.. _ref_flyteidl.core.CompiledWorkflowClosure: + +CompiledWorkflowClosure +------------------------------------------------------------------ + +A Compiled Workflow Closure contains all the information required to start a new execution, or to visualize a workflow +and its details. The CompiledWorkflowClosure should always contain a primary workflow, that is the main workflow that +will begin the execution. All subworkflows are denormalized. WorkflowNodes refer to the workflow identifiers of +compiled subworkflows. + + + +.. csv-table:: CompiledWorkflowClosure type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "primary", ":ref:`ref_flyteidl.core.CompiledWorkflow`", "", "+required" + "sub_workflows", ":ref:`ref_flyteidl.core.CompiledWorkflow`", "repeated", "Guaranteed that there will only exist one and only one workflow with a given id, i.e., every sub workflow has a unique identifier. 
Also every enclosed subworkflow is used either by a primary workflow or by a subworkflow as an inlined workflow +optional" + "tasks", ":ref:`ref_flyteidl.core.CompiledTask`", "repeated", "Guaranteed that there will only exist one and only one task with a given id, i.e., every task has a unique id +required (at least 1)" + + + + + + + +.. _ref_flyteidl.core.ConnectionSet: + +ConnectionSet +------------------------------------------------------------------ + +Adjacency list for the workflow. This is created as part of the compilation process. Every process after the compilation +step uses this created ConnectionSet + + + +.. csv-table:: ConnectionSet type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "downstream", ":ref:`ref_flyteidl.core.ConnectionSet.DownstreamEntry`", "repeated", "A list of all the node ids that are downstream from a given node id" + "upstream", ":ref:`ref_flyteidl.core.ConnectionSet.UpstreamEntry`", "repeated", "A list of all the node ids, that are upstream of this node id" + + + + + + + +.. _ref_flyteidl.core.ConnectionSet.DownstreamEntry: + +ConnectionSet.DownstreamEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: ConnectionSet.DownstreamEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_flyteidl.core.ConnectionSet.IdList`", "", "" + + + + + + + +.. _ref_flyteidl.core.ConnectionSet.IdList: + +ConnectionSet.IdList +------------------------------------------------------------------ + + + + + +.. csv-table:: ConnectionSet.IdList type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "ids", ":ref:`ref_string`", "repeated", "" + + + + + + + +.. _ref_flyteidl.core.ConnectionSet.UpstreamEntry: + +ConnectionSet.UpstreamEntry +------------------------------------------------------------------ + + + + + +.. 
csv-table:: ConnectionSet.UpstreamEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_flyteidl.core.ConnectionSet.IdList`", "", "" + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/core/condition.proto: + +flyteidl/core/condition.proto +================================================================== + + + + + +.. _ref_flyteidl.core.BooleanExpression: + +BooleanExpression +------------------------------------------------------------------ + +Defines a boolean expression tree. It can be a simple or a conjunction expression. +Multiple expressions can be combined using a conjunction or a disjunction to result in a final boolean result. + + + +.. csv-table:: BooleanExpression type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "conjunction", ":ref:`ref_flyteidl.core.ConjunctionExpression`", "", "" + "comparison", ":ref:`ref_flyteidl.core.ComparisonExpression`", "", "" + + + + + + + +.. _ref_flyteidl.core.ComparisonExpression: + +ComparisonExpression +------------------------------------------------------------------ + +Defines a 2-level tree where the root is a comparison operator and Operands are primitives or known variables. +Each expression results in a boolean result. + + + +.. csv-table:: ComparisonExpression type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "operator", ":ref:`ref_flyteidl.core.ComparisonExpression.Operator`", "", "" + "left_value", ":ref:`ref_flyteidl.core.Operand`", "", "" + "right_value", ":ref:`ref_flyteidl.core.Operand`", "", "" + + + + + + + +.. _ref_flyteidl.core.ConjunctionExpression: + +ConjunctionExpression +------------------------------------------------------------------ + +Defines a conjunction expression of two boolean expressions. + + + +.. 
csv-table:: ConjunctionExpression type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "operator", ":ref:`ref_flyteidl.core.ConjunctionExpression.LogicalOperator`", "", "" + "left_expression", ":ref:`ref_flyteidl.core.BooleanExpression`", "", "" + "right_expression", ":ref:`ref_flyteidl.core.BooleanExpression`", "", "" + + + + + + + +.. _ref_flyteidl.core.Operand: + +Operand +------------------------------------------------------------------ + +Defines an operand to a comparison expression. + + + +.. csv-table:: Operand type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "primitive", ":ref:`ref_flyteidl.core.Primitive`", "", "Can be a constant" + "var", ":ref:`ref_string`", "", "Or one of this node's input variables" + + + + + + +.. + end messages + + + +.. _ref_flyteidl.core.ComparisonExpression.Operator: + +ComparisonExpression.Operator +------------------------------------------------------------------ + +Binary Operator for each expression + +.. csv-table:: Enum ComparisonExpression.Operator values + :header: "Name", "Number", "Description" + :widths: auto + + "EQ", "0", "" + "NEQ", "1", "" + "GT", "2", "Greater Than" + "GTE", "3", "" + "LT", "4", "Less Than" + "LTE", "5", "" + + + +.. _ref_flyteidl.core.ConjunctionExpression.LogicalOperator: + +ConjunctionExpression.LogicalOperator +------------------------------------------------------------------ + +Nested conditions. They can be conjoined using AND / OR +Order of evaluation is not important as the operators are Commutative + +.. csv-table:: Enum ConjunctionExpression.LogicalOperator values + :header: "Name", "Number", "Description" + :widths: auto + + "AND", "0", "Conjunction" + "OR", "1", "" + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/core/dynamic_job.proto: + +flyteidl/core/dynamic_job.proto +================================================================== + + + + + +.. 
_ref_flyteidl.core.DynamicJobSpec: + +DynamicJobSpec +------------------------------------------------------------------ + +Describes a set of tasks to execute and how the final outputs are produced. + + + +.. csv-table:: DynamicJobSpec type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "nodes", ":ref:`ref_flyteidl.core.Node`", "repeated", "A collection of nodes to execute." + "min_successes", ":ref:`ref_int64`", "", "An absolute number of successful completions of nodes required to mark this job as succeeded. As soon as this criteria is met, the dynamic job will be marked as successful and outputs will be computed. If this number becomes impossible to reach (e.g. number of currently running tasks + number of already succeeded tasks < min_successes) the task will be aborted immediately and marked as failed. The default value of this field, if not specified, is the count of nodes repeated field." + "outputs", ":ref:`ref_flyteidl.core.Binding`", "repeated", "Describes how to bind the final output of the dynamic job from the outputs of executed nodes. The referenced ids in bindings should have the generated id for the subtask." + "tasks", ":ref:`ref_flyteidl.core.TaskTemplate`", "repeated", "[Optional] A complete list of task specs referenced in nodes." + "subworkflows", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "repeated", "[Optional] A complete list of task specs referenced in nodes." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/core/errors.proto: + +flyteidl/core/errors.proto +================================================================== + + + + + +.. _ref_flyteidl.core.ContainerError: + +ContainerError +------------------------------------------------------------------ + +Error message to propagate detailed errors from container executions to the execution +engine. + + + +.. 
csv-table:: ContainerError type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "code", ":ref:`ref_string`", "", "A simplified code for errors, so that we can provide a glossary of all possible errors." + "message", ":ref:`ref_string`", "", "A detailed error message." + "kind", ":ref:`ref_flyteidl.core.ContainerError.Kind`", "", "An abstract error kind for this error. Defaults to Non_Recoverable if not specified." + "origin", ":ref:`ref_flyteidl.core.ExecutionError.ErrorKind`", "", "Defines the origin of the error (system, user, unknown)." + + + + + + + +.. _ref_flyteidl.core.ErrorDocument: + +ErrorDocument +------------------------------------------------------------------ + +Defines the errors.pb file format the container can produce to communicate +failure reasons to the execution engine. + + + +.. csv-table:: ErrorDocument type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "error", ":ref:`ref_flyteidl.core.ContainerError`", "", "The error raised during execution." + + + + + + +.. + end messages + + + +.. _ref_flyteidl.core.ContainerError.Kind: + +ContainerError.Kind +------------------------------------------------------------------ + +Defines a generic error type that dictates the behavior of the retry strategy. + +.. csv-table:: Enum ContainerError.Kind values + :header: "Name", "Number", "Description" + :widths: auto + + "NON_RECOVERABLE", "0", "" + "RECOVERABLE", "1", "" + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/core/execution.proto: + +flyteidl/core/execution.proto +================================================================== + + + + + +.. _ref_flyteidl.core.ExecutionError: + +ExecutionError +------------------------------------------------------------------ + +Represents the error message from the execution. + + + +.. 
csv-table:: ExecutionError type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "code", ":ref:`ref_string`", "", "Error code indicates a grouping of a type of error." + "message", ":ref:`ref_string`", "", "Detailed description of the error - including stack trace." + "error_uri", ":ref:`ref_string`", "", "Full error contents accessible via a URI" + "kind", ":ref:`ref_flyteidl.core.ExecutionError.ErrorKind`", "", "" + + + + + + + +.. _ref_flyteidl.core.NodeExecution: + +NodeExecution +------------------------------------------------------------------ + +Indicates various phases of Node Execution that only include the time spent to run the nodes/workflows + + + + + + + + +.. _ref_flyteidl.core.QualityOfService: + +QualityOfService +------------------------------------------------------------------ + +Indicates the priority of an execution. + + + +.. csv-table:: QualityOfService type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "tier", ":ref:`ref_flyteidl.core.QualityOfService.Tier`", "", "" + "spec", ":ref:`ref_flyteidl.core.QualityOfServiceSpec`", "", "" + + + + + + + +.. _ref_flyteidl.core.QualityOfServiceSpec: + +QualityOfServiceSpec +------------------------------------------------------------------ + +Represents customized execution run-time attributes. + + + +.. csv-table:: QualityOfServiceSpec type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "queueing_budget", ":ref:`ref_google.protobuf.Duration`", "", "Indicates how much queueing delay an execution can tolerate." + + + + + + + +.. _ref_flyteidl.core.TaskExecution: + +TaskExecution +------------------------------------------------------------------ + +Phases that task plugins can go through. Not all phases may be applicable to a specific plugin task, +but this is the cumulative list that customers may want to know about for their task. + + + + + + + + +.. 
_ref_flyteidl.core.TaskLog: + +TaskLog +------------------------------------------------------------------ + +Log information for the task that is specific to a log sink +When our log story is flushed out, we may have more metadata here like log link expiry + + + +.. csv-table:: TaskLog type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "uri", ":ref:`ref_string`", "", "" + "name", ":ref:`ref_string`", "", "" + "message_format", ":ref:`ref_flyteidl.core.TaskLog.MessageFormat`", "", "" + "ttl", ":ref:`ref_google.protobuf.Duration`", "", "" + + + + + + + +.. _ref_flyteidl.core.WorkflowExecution: + +WorkflowExecution +------------------------------------------------------------------ + +Indicates various phases of Workflow Execution + + + + + + + +.. + end messages + + + +.. _ref_flyteidl.core.ExecutionError.ErrorKind: + +ExecutionError.ErrorKind +------------------------------------------------------------------ + +Error type: System or User + +.. csv-table:: Enum ExecutionError.ErrorKind values + :header: "Name", "Number", "Description" + :widths: auto + + "UNKNOWN", "0", "" + "USER", "1", "" + "SYSTEM", "2", "" + + + +.. _ref_flyteidl.core.NodeExecution.Phase: + +NodeExecution.Phase +------------------------------------------------------------------ + + + +.. csv-table:: Enum NodeExecution.Phase values + :header: "Name", "Number", "Description" + :widths: auto + + "UNDEFINED", "0", "" + "QUEUED", "1", "" + "RUNNING", "2", "" + "SUCCEEDED", "3", "" + "FAILING", "4", "" + "FAILED", "5", "" + "ABORTED", "6", "" + "SKIPPED", "7", "" + "TIMED_OUT", "8", "" + "DYNAMIC_RUNNING", "9", "" + "RECOVERED", "10", "" + + + +.. _ref_flyteidl.core.QualityOfService.Tier: + +QualityOfService.Tier +------------------------------------------------------------------ + + + +.. csv-table:: Enum QualityOfService.Tier values + :header: "Name", "Number", "Description" + :widths: auto + + "UNDEFINED", "0", "Default: no quality of service specified." 
+ "HIGH", "1", "" + "MEDIUM", "2", "" + "LOW", "3", "" + + + +.. _ref_flyteidl.core.TaskExecution.Phase: + +TaskExecution.Phase +------------------------------------------------------------------ + + + +.. csv-table:: Enum TaskExecution.Phase values + :header: "Name", "Number", "Description" + :widths: auto + + "UNDEFINED", "0", "" + "QUEUED", "1", "" + "RUNNING", "2", "" + "SUCCEEDED", "3", "" + "ABORTED", "4", "" + "FAILED", "5", "" + "INITIALIZING", "6", "To indicate cases where task is initializing, like: ErrImagePull, ContainerCreating, PodInitializing" + "WAITING_FOR_RESOURCES", "7", "To address cases, where underlying resource is not available: Backoff error, Resource quota exceeded" + + + +.. _ref_flyteidl.core.TaskLog.MessageFormat: + +TaskLog.MessageFormat +------------------------------------------------------------------ + + + +.. csv-table:: Enum TaskLog.MessageFormat values + :header: "Name", "Number", "Description" + :widths: auto + + "UNKNOWN", "0", "" + "CSV", "1", "" + "JSON", "2", "" + + + +.. _ref_flyteidl.core.WorkflowExecution.Phase: + +WorkflowExecution.Phase +------------------------------------------------------------------ + + + +.. csv-table:: Enum WorkflowExecution.Phase values + :header: "Name", "Number", "Description" + :widths: auto + + "UNDEFINED", "0", "" + "QUEUED", "1", "" + "RUNNING", "2", "" + "SUCCEEDING", "3", "" + "SUCCEEDED", "4", "" + "FAILING", "5", "" + "FAILED", "6", "" + "ABORTED", "7", "" + "TIMED_OUT", "8", "" + "ABORTING", "9", "" + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/core/identifier.proto: + +flyteidl/core/identifier.proto +================================================================== + + + + + +.. _ref_flyteidl.core.Identifier: + +Identifier +------------------------------------------------------------------ + +Encapsulation of fields that uniquely identifies a Flyte resource. + + + +.. 
csv-table:: Identifier type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Identifies the specific type of resource that this identifier corresponds to." + "project", ":ref:`ref_string`", "", "Name of the project the resource belongs to." + "domain", ":ref:`ref_string`", "", "Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project." + "name", ":ref:`ref_string`", "", "User provided value for the resource." + "version", ":ref:`ref_string`", "", "Specific version of the resource." + + + + + + + +.. _ref_flyteidl.core.NodeExecutionIdentifier: + +NodeExecutionIdentifier +------------------------------------------------------------------ + +Encapsulation of fields that identify a Flyte node execution entity. + + + +.. csv-table:: NodeExecutionIdentifier type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "node_id", ":ref:`ref_string`", "", "" + "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "" + + + + + + + +.. _ref_flyteidl.core.SignalIdentifier: + +SignalIdentifier +------------------------------------------------------------------ + +Encapsulation of fields the uniquely identify a signal. + + + +.. csv-table:: SignalIdentifier type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "signal_id", ":ref:`ref_string`", "", "Unique identifier for a signal." + "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifies the Flyte workflow execution this signal belongs to." + + + + + + + +.. _ref_flyteidl.core.TaskExecutionIdentifier: + +TaskExecutionIdentifier +------------------------------------------------------------------ + +Encapsulation of fields that identify a Flyte task execution entity. + + + +.. 
csv-table:: TaskExecutionIdentifier type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "task_id", ":ref:`ref_flyteidl.core.Identifier`", "", "" + "node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "" + "retry_attempt", ":ref:`ref_uint32`", "", "" + + + + + + + +.. _ref_flyteidl.core.WorkflowExecutionIdentifier: + +WorkflowExecutionIdentifier +------------------------------------------------------------------ + +Encapsulation of fields that uniquely identifies a Flyte workflow execution + + + +.. csv-table:: WorkflowExecutionIdentifier type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Name of the project the resource belongs to." + "domain", ":ref:`ref_string`", "", "Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project." + "name", ":ref:`ref_string`", "", "User or system provided value for the resource." + + + + + + +.. + end messages + + + +.. _ref_flyteidl.core.ResourceType: + +ResourceType +------------------------------------------------------------------ + +Indicates a resource type within Flyte. + +.. csv-table:: Enum ResourceType values + :header: "Name", "Number", "Description" + :widths: auto + + "UNSPECIFIED", "0", "" + "TASK", "1", "" + "WORKFLOW", "2", "" + "LAUNCH_PLAN", "3", "" + "DATASET", "4", "A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects" + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. 
_ref_flyteidl/core/interface.proto: + +flyteidl/core/interface.proto +================================================================== + + + + + +.. _ref_flyteidl.core.Parameter: + +Parameter +------------------------------------------------------------------ + +A parameter is used as input to a launch plan and has +the special ability to have a default value or mark itself as required. + + + +.. csv-table:: Parameter type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "var", ":ref:`ref_flyteidl.core.Variable`", "", "+required Variable. Defines the type of the variable backing this parameter." + "default", ":ref:`ref_flyteidl.core.Literal`", "", "Defines a default value that has to match the variable type defined." + "required", ":ref:`ref_bool`", "", "+optional, is this value required to be filled." + + + + + + + +.. _ref_flyteidl.core.ParameterMap: + +ParameterMap +------------------------------------------------------------------ + +A map of Parameters. + + + +.. csv-table:: ParameterMap type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "parameters", ":ref:`ref_flyteidl.core.ParameterMap.ParametersEntry`", "repeated", "Defines a map of parameter names to parameters." + + + + + + + +.. _ref_flyteidl.core.ParameterMap.ParametersEntry: + +ParameterMap.ParametersEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: ParameterMap.ParametersEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_flyteidl.core.Parameter`", "", "" + + + + + + + +.. _ref_flyteidl.core.TypedInterface: + +TypedInterface +------------------------------------------------------------------ + +Defines strongly typed inputs and outputs. + + + +.. 
csv-table:: TypedInterface type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "inputs", ":ref:`ref_flyteidl.core.VariableMap`", "", "" + "outputs", ":ref:`ref_flyteidl.core.VariableMap`", "", "" + + + + + + + +.. _ref_flyteidl.core.Variable: + +Variable +------------------------------------------------------------------ + +Defines a strongly typed variable. + + + +.. csv-table:: Variable type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "type", ":ref:`ref_flyteidl.core.LiteralType`", "", "Variable literal type." + "description", ":ref:`ref_string`", "", "+optional string describing input variable" + + + + + + + +.. _ref_flyteidl.core.VariableMap: + +VariableMap +------------------------------------------------------------------ + +A map of Variables + + + +.. csv-table:: VariableMap type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "variables", ":ref:`ref_flyteidl.core.VariableMap.VariablesEntry`", "repeated", "Defines a map of variable names to variables." + + + + + + + +.. _ref_flyteidl.core.VariableMap.VariablesEntry: + +VariableMap.VariablesEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: VariableMap.VariablesEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_flyteidl.core.Variable`", "", "" + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/core/literals.proto: + +flyteidl/core/literals.proto +================================================================== + + + + + +.. _ref_flyteidl.core.Binary: + +Binary +------------------------------------------------------------------ + +A simple byte array with a tag to help different parts of the system communicate about what is in the byte array. 
+It's strongly advisable that consumers of this type define a unique tag and validate the tag before parsing the data. + + + +.. csv-table:: Binary type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_bytes`", "", "" + "tag", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_flyteidl.core.Binding: + +Binding +------------------------------------------------------------------ + +An input/output binding of a variable to either static value or a node output. + + + +.. csv-table:: Binding type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "var", ":ref:`ref_string`", "", "Variable name must match an input/output variable of the node." + "binding", ":ref:`ref_flyteidl.core.BindingData`", "", "Data to use to bind this variable." + + + + + + + +.. _ref_flyteidl.core.BindingData: + +BindingData +------------------------------------------------------------------ + +Specifies either a simple value or a reference to another output. + + + +.. csv-table:: BindingData type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "scalar", ":ref:`ref_flyteidl.core.Scalar`", "", "A simple scalar value." + "collection", ":ref:`ref_flyteidl.core.BindingDataCollection`", "", "A collection of binding data. This allows nesting of binding data to any number of levels." + "promise", ":ref:`ref_flyteidl.core.OutputReference`", "", "References an output promised by another node." + "map", ":ref:`ref_flyteidl.core.BindingDataMap`", "", "A map of bindings. The key is always a string." + "union", ":ref:`ref_flyteidl.core.UnionInfo`", "", "" + + + + + + + +.. _ref_flyteidl.core.BindingDataCollection: + +BindingDataCollection +------------------------------------------------------------------ + +A collection of BindingData items. + + + +.. 
csv-table:: BindingDataCollection type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "bindings", ":ref:`ref_flyteidl.core.BindingData`", "repeated", "" + + + + + + + +.. _ref_flyteidl.core.BindingDataMap: + +BindingDataMap +------------------------------------------------------------------ + +A map of BindingData items. + + + +.. csv-table:: BindingDataMap type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "bindings", ":ref:`ref_flyteidl.core.BindingDataMap.BindingsEntry`", "repeated", "" + + + + + + + +.. _ref_flyteidl.core.BindingDataMap.BindingsEntry: + +BindingDataMap.BindingsEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: BindingDataMap.BindingsEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_flyteidl.core.BindingData`", "", "" + + + + + + + +.. _ref_flyteidl.core.Blob: + +Blob +------------------------------------------------------------------ + +Refers to an offloaded set of files. It encapsulates the type of the store and a unique uri for where the data is. +There are no restrictions on how the uri is formatted since it will depend on how to interact with the store. + + + +.. csv-table:: Blob type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "metadata", ":ref:`ref_flyteidl.core.BlobMetadata`", "", "" + "uri", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_flyteidl.core.BlobMetadata: + +BlobMetadata +------------------------------------------------------------------ + + + + + +.. csv-table:: BlobMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "type", ":ref:`ref_flyteidl.core.BlobType`", "", "" + + + + + + + +.. _ref_flyteidl.core.KeyValuePair: + +KeyValuePair +------------------------------------------------------------------ + +A generic key value pair. + + + +.. 
csv-table:: KeyValuePair type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "required." + "value", ":ref:`ref_string`", "", "+optional." + + + + + + + +.. _ref_flyteidl.core.Literal: + +Literal +------------------------------------------------------------------ + +A simple value. This supports any level of nesting (e.g. array of array of array of Blobs) as well as simple primitives. + + + +.. csv-table:: Literal type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "scalar", ":ref:`ref_flyteidl.core.Scalar`", "", "A simple value." + "collection", ":ref:`ref_flyteidl.core.LiteralCollection`", "", "A collection of literals to allow nesting." + "map", ":ref:`ref_flyteidl.core.LiteralMap`", "", "A map of strings to literals." + "hash", ":ref:`ref_string`", "", "A hash representing this literal. This is used for caching purposes. For more details refer to RFC 1893 (https://github.com/flyteorg/flyte/blob/master/rfc/system/1893-caching-of-offloaded-objects.md)" + + + + + + + +.. _ref_flyteidl.core.LiteralCollection: + +LiteralCollection +------------------------------------------------------------------ + +A collection of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field. + + + +.. csv-table:: LiteralCollection type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "literals", ":ref:`ref_flyteidl.core.Literal`", "repeated", "" + + + + + + + +.. _ref_flyteidl.core.LiteralMap: + +LiteralMap +------------------------------------------------------------------ + +A map of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field. + + + +.. csv-table:: LiteralMap type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "literals", ":ref:`ref_flyteidl.core.LiteralMap.LiteralsEntry`", "repeated", "" + + + + + + + +.. 
_ref_flyteidl.core.LiteralMap.LiteralsEntry: + +LiteralMap.LiteralsEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: LiteralMap.LiteralsEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_flyteidl.core.Literal`", "", "" + + + + + + + +.. _ref_flyteidl.core.Primitive: + +Primitive +------------------------------------------------------------------ + +Primitive Types + + + +.. csv-table:: Primitive type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "integer", ":ref:`ref_int64`", "", "" + "float_value", ":ref:`ref_double`", "", "" + "string_value", ":ref:`ref_string`", "", "" + "boolean", ":ref:`ref_bool`", "", "" + "datetime", ":ref:`ref_google.protobuf.Timestamp`", "", "" + "duration", ":ref:`ref_google.protobuf.Duration`", "", "" + + + + + + + +.. _ref_flyteidl.core.RetryStrategy: + +RetryStrategy +------------------------------------------------------------------ + +Retry strategy associated with an executable unit. + + + +.. csv-table:: RetryStrategy type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "retries", ":ref:`ref_uint32`", "", "Number of retries. Retries will be consumed when the job fails with a recoverable error. The number of retries must be less than or equals to 10." + + + + + + + +.. _ref_flyteidl.core.Scalar: + +Scalar +------------------------------------------------------------------ + + + + + +.. 
csv-table:: Scalar type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "primitive", ":ref:`ref_flyteidl.core.Primitive`", "", "" + "blob", ":ref:`ref_flyteidl.core.Blob`", "", "" + "binary", ":ref:`ref_flyteidl.core.Binary`", "", "" + "schema", ":ref:`ref_flyteidl.core.Schema`", "", "" + "none_type", ":ref:`ref_flyteidl.core.Void`", "", "" + "error", ":ref:`ref_flyteidl.core.Error`", "", "" + "generic", ":ref:`ref_google.protobuf.Struct`", "", "" + "structured_dataset", ":ref:`ref_flyteidl.core.StructuredDataset`", "", "" + "union", ":ref:`ref_flyteidl.core.Union`", "", "" + + + + + + + +.. _ref_flyteidl.core.Schema: + +Schema +------------------------------------------------------------------ + +A strongly typed schema that defines the interface of data retrieved from the underlying storage medium. + + + +.. csv-table:: Schema type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "uri", ":ref:`ref_string`", "", "" + "type", ":ref:`ref_flyteidl.core.SchemaType`", "", "" + + + + + + + +.. _ref_flyteidl.core.StructuredDataset: + +StructuredDataset +------------------------------------------------------------------ + + + + + +.. csv-table:: StructuredDataset type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "uri", ":ref:`ref_string`", "", "String location uniquely identifying where the data is. Should start with the storage location (e.g. s3://, gs://, bq://, etc.)" + "metadata", ":ref:`ref_flyteidl.core.StructuredDatasetMetadata`", "", "" + + + + + + + +.. _ref_flyteidl.core.StructuredDatasetMetadata: + +StructuredDatasetMetadata +------------------------------------------------------------------ + + + + + +.. csv-table:: StructuredDatasetMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "structured_dataset_type", ":ref:`ref_flyteidl.core.StructuredDatasetType`", "", "Bundle the type information along with the literal. 
This is here because StructuredDatasets can often be more defined at run time than at compile time. That is, at compile time you might only declare a task to return a pandas dataframe or a StructuredDataset, without any column information, but at run time, you might have that column information. flytekit python will copy this type information into the literal, from the type information, if not provided by the various plugins (encoders). Since this field is run time generated, it's not used for any type checking." + + + + + + + +.. _ref_flyteidl.core.Union: + +Union +------------------------------------------------------------------ + +The runtime representation of a tagged union value. See `UnionType` for more details. + + + +.. csv-table:: Union type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "value", ":ref:`ref_flyteidl.core.Literal`", "", "" + "type", ":ref:`ref_flyteidl.core.LiteralType`", "", "" + + + + + + + +.. _ref_flyteidl.core.UnionInfo: + +UnionInfo +------------------------------------------------------------------ + + + + + +.. csv-table:: UnionInfo type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "targetType", ":ref:`ref_flyteidl.core.LiteralType`", "", "" + + + + + + + +.. _ref_flyteidl.core.Void: + +Void +------------------------------------------------------------------ + +Used to denote a nil/null/None assignment to a scalar value. The underlying LiteralType for Void is intentionally +undefined since it can be assigned to a scalar of any LiteralType. + + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/core/security.proto: + +flyteidl/core/security.proto +================================================================== + + + + + +.. 
_ref_flyteidl.core.Identity: + +Identity +------------------------------------------------------------------ + +Identity encapsulates the various security identities a task can run as. It's up to the underlying plugin to pick the +right identity for the execution environment. + + + +.. csv-table:: Identity type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "iam_role", ":ref:`ref_string`", "", "iam_role references the fully qualified name of Identity & Access Management role to impersonate." + "k8s_service_account", ":ref:`ref_string`", "", "k8s_service_account references a kubernetes service account to impersonate." + "oauth2_client", ":ref:`ref_flyteidl.core.OAuth2Client`", "", "oauth2_client references an oauth2 client. Backend plugins can use this information to impersonate the client when making external calls." + + + + + + + +.. _ref_flyteidl.core.OAuth2Client: + +OAuth2Client +------------------------------------------------------------------ + +OAuth2Client encapsulates OAuth2 Client Credentials to be used when making calls on behalf of that task. + + + +.. csv-table:: OAuth2Client type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "client_id", ":ref:`ref_string`", "", "client_id is the public id for the client to use. The system will not perform any pre-auth validation that the secret requested matches the client_id indicated here. +required" + "client_secret", ":ref:`ref_flyteidl.core.Secret`", "", "client_secret is a reference to the secret used to authenticate the OAuth2 client. +required" + + + + + + + +.. _ref_flyteidl.core.OAuth2TokenRequest: + +OAuth2TokenRequest +------------------------------------------------------------------ + +OAuth2TokenRequest encapsulates information needed to request an OAuth2 token. +FLYTE_TOKENS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if +tokens are passed through environment variables. 
+FLYTE_TOKENS_PATH_PREFIX will be passed to indicate the prefix of the path where secrets will be mounted if tokens +are passed through file mounts. + + + +.. csv-table:: OAuth2TokenRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "name", ":ref:`ref_string`", "", "name indicates a unique id for the token request within this task token requests. It'll be used as a suffix for environment variables and as a filename for mounting tokens as files. +required" + "type", ":ref:`ref_flyteidl.core.OAuth2TokenRequest.Type`", "", "type indicates the type of the request to make. Defaults to CLIENT_CREDENTIALS. +required" + "client", ":ref:`ref_flyteidl.core.OAuth2Client`", "", "client references the client_id/secret to use to request the OAuth2 token. +required" + "idp_discovery_endpoint", ":ref:`ref_string`", "", "idp_discovery_endpoint references the discovery endpoint used to retrieve token endpoint and other related information. +optional" + "token_endpoint", ":ref:`ref_string`", "", "token_endpoint references the token issuance endpoint. If idp_discovery_endpoint is not provided, this parameter is mandatory. +optional" + + + + + + + +.. _ref_flyteidl.core.Secret: + +Secret +------------------------------------------------------------------ + +Secret encapsulates information about the secret a task needs to proceed. An environment variable +FLYTE_SECRETS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if +secrets are passed through environment variables. +FLYTE_SECRETS_DEFAULT_DIR will be passed to indicate the prefix of the path where secrets will be mounted if secrets +are passed through file mounts. + + + +.. csv-table:: Secret type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "group", ":ref:`ref_string`", "", "The name of the secret group where to find the key referenced below. For K8s secrets, this should be the name of the v1/secret object. 
For Confidant, this should be the Credential name. For Vault, this should be the secret name. For AWS Secret Manager, this should be the name of the secret. +required" + "group_version", ":ref:`ref_string`", "", "The group version to fetch. This is not supported in all secret management systems. It'll be ignored for the ones that do not support it. +optional" + "key", ":ref:`ref_string`", "", "The name of the secret to mount. This has to match an existing secret in the system. It's up to the implementation of the secret management system to require case sensitivity. For K8s secrets, Confidant and Vault, this should match one of the keys inside the secret. For AWS Secret Manager, it's ignored. +optional" + "mount_requirement", ":ref:`ref_flyteidl.core.Secret.MountType`", "", "mount_requirement is optional. Indicates where the secret has to be mounted. If provided, the execution will fail if the underlying key management system cannot satisfy that requirement. If not provided, the default location will depend on the key management system. +optional" + + + + + + + +.. _ref_flyteidl.core.SecurityContext: + +SecurityContext +------------------------------------------------------------------ + +SecurityContext holds security attributes that apply to tasks. + + + +.. csv-table:: SecurityContext type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "run_as", ":ref:`ref_flyteidl.core.Identity`", "", "run_as encapsulates the identity a pod should run as. If the task fills in multiple fields here, it'll be up to the backend plugin to choose the appropriate identity for the execution engine the task will run on." + "secrets", ":ref:`ref_flyteidl.core.Secret`", "repeated", "secrets indicate the list of secrets the task needs in order to proceed. Secrets will be mounted/passed to the pod as it starts. If the plugin responsible for kicking off the task will not run it on a flyte cluster (e.g. 
AWS Batch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access to the secret) and to pass it to the remote execution engine." + "tokens", ":ref:`ref_flyteidl.core.OAuth2TokenRequest`", "repeated", "tokens indicate the list of token requests the task needs in order to proceed. Tokens will be mounted/passed to the pod as it starts. If the plugin responsible for kicking off the task will not run it on a flyte cluster (e.g. AWS Batch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access to the secret) and to pass it to the remote execution engine." + + + + + + +.. + end messages + + + +.. _ref_flyteidl.core.OAuth2TokenRequest.Type: + +OAuth2TokenRequest.Type +------------------------------------------------------------------ + +Type of the token requested. + +.. csv-table:: Enum OAuth2TokenRequest.Type values + :header: "Name", "Number", "Description" + :widths: auto + + "CLIENT_CREDENTIALS", "0", "CLIENT_CREDENTIALS indicates a 2-legged OAuth token requested using client credentials." + + + +.. _ref_flyteidl.core.Secret.MountType: + +Secret.MountType +------------------------------------------------------------------ + + + +.. csv-table:: Enum Secret.MountType values + :header: "Name", "Number", "Description" + :widths: auto + + "ANY", "0", "Default case, indicates the client can tolerate either mounting options." + "ENV_VAR", "1", "ENV_VAR indicates the secret needs to be mounted as an environment variable." + "FILE", "2", "FILE indicates the secret needs to be mounted as a file." + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/core/tasks.proto: + +flyteidl/core/tasks.proto +================================================================== + + + + + +.. _ref_flyteidl.core.Container: + +Container +------------------------------------------------------------------ + + + + + +.. 
csv-table:: Container type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "image", ":ref:`ref_string`", "", "Container image url. Eg: docker/redis:latest" + "command", ":ref:`ref_string`", "repeated", "Command to be executed, if not provided, the default entrypoint in the container image will be used." + "args", ":ref:`ref_string`", "repeated", "These will default to Flyte given paths. If provided, the system will not append known paths. If the task still needs flyte's inputs and outputs path, add $(FLYTE_INPUT_FILE), $(FLYTE_OUTPUT_FILE) wherever makes sense and the system will populate these before executing the container." + "resources", ":ref:`ref_flyteidl.core.Resources`", "", "Container resources requirement as specified by the container engine." + "env", ":ref:`ref_flyteidl.core.KeyValuePair`", "repeated", "Environment variables will be set as the container is starting up." + "config", ":ref:`ref_flyteidl.core.KeyValuePair`", "repeated", "**Deprecated.** Allows extra configs to be available for the container. TODO: elaborate on how configs will become available. Deprecated, please use TaskTemplate.config instead." + "ports", ":ref:`ref_flyteidl.core.ContainerPort`", "repeated", "Ports to open in the container. This feature is not supported by all execution engines. (e.g. supported on K8s but not supported on AWS Batch) Only K8s" + "data_config", ":ref:`ref_flyteidl.core.DataLoadingConfig`", "", "BETA: Optional configuration for DataLoading. If not specified, then default values are used. This makes it possible to run a completely portable container, that uses inputs and outputs only from the local file-system and without having any reference to flyteidl. This is supported only on K8s at the moment. If data loading is enabled, then data will be mounted in accompanying directories specified in the DataLoadingConfig. 
If the directories are not specified, inputs will be mounted onto and outputs will be uploaded from a pre-determined file-system path. Refer to the documentation to understand the default paths. Only K8s" + "architecture", ":ref:`ref_flyteidl.core.Container.Architecture`", "", "" + + + + + + + +.. _ref_flyteidl.core.ContainerPort: + +ContainerPort +------------------------------------------------------------------ + +Defines port properties for a container. + + + +.. csv-table:: ContainerPort type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "container_port", ":ref:`ref_uint32`", "", "Number of port to expose on the pod's IP address. This must be a valid port number, 0 < x < 65536." + + + + + + + +.. _ref_flyteidl.core.DataLoadingConfig: + +DataLoadingConfig +------------------------------------------------------------------ + +This configuration allows executing raw containers in Flyte using the Flyte CoPilot system. +Flyte CoPilot eliminates the need for flytekit or an SDK inside the container. Any inputs required by the users container are side-loaded in the input_path +Any outputs generated by the user container - within output_path are automatically uploaded. + + + +.. csv-table:: DataLoadingConfig type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "enabled", ":ref:`ref_bool`", "", "Flag enables DataLoading Config. If this is not set, data loading will not be used!" + "input_path", ":ref:`ref_string`", "", "File system path (start at root). This folder will contain all the inputs exploded to a separate file. Example, if the input interface needs (x: int, y: blob, z: multipart_blob) and the input path is '/var/flyte/inputs', then the file system will look like /var/flyte/inputs/inputs. .pb .json .yaml> -> Format as defined previously. 
The Blob and Multipart blob will reference local filesystem instead of remote locations /var/flyte/inputs/x -> X is a file that contains the value of x (integer) in string format /var/flyte/inputs/y -> Y is a file in Binary format /var/flyte/inputs/z/... -> Note Z itself is a directory More information about the protocol - refer to docs #TODO reference docs here" + "output_path", ":ref:`ref_string`", "", "File system path (start at root). This folder should contain all the outputs for the task as individual files and/or an error text file" + "format", ":ref:`ref_flyteidl.core.DataLoadingConfig.LiteralMapFormat`", "", "In the inputs folder, there will be an additional summary/metadata file that contains references to all files or inlined primitive values. This format decides the actual encoding for the data. Refer to the encoding to understand the specifics of the contents and the encoding" + "io_strategy", ":ref:`ref_flyteidl.core.IOStrategy`", "", "" + + + + + + + +.. _ref_flyteidl.core.IOStrategy: + +IOStrategy +------------------------------------------------------------------ + +Strategy to use when dealing with Blob, Schema, or multipart blob data (large datasets) + + + +.. csv-table:: IOStrategy type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "download_mode", ":ref:`ref_flyteidl.core.IOStrategy.DownloadMode`", "", "Mode to use to manage downloads" + "upload_mode", ":ref:`ref_flyteidl.core.IOStrategy.UploadMode`", "", "Mode to use to manage uploads" + + + + + + + +.. _ref_flyteidl.core.K8sObjectMetadata: + +K8sObjectMetadata +------------------------------------------------------------------ + +Metadata for building a kubernetes object when a task is executed. + + + +.. csv-table:: K8sObjectMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "labels", ":ref:`ref_flyteidl.core.K8sObjectMetadata.LabelsEntry`", "repeated", "Optional labels to add to the pod definition." 
+ "annotations", ":ref:`ref_flyteidl.core.K8sObjectMetadata.AnnotationsEntry`", "repeated", "Optional annotations to add to the pod definition." + + + + + + + +.. _ref_flyteidl.core.K8sObjectMetadata.AnnotationsEntry: + +K8sObjectMetadata.AnnotationsEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: K8sObjectMetadata.AnnotationsEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_flyteidl.core.K8sObjectMetadata.LabelsEntry: + +K8sObjectMetadata.LabelsEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: K8sObjectMetadata.LabelsEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_flyteidl.core.K8sPod: + +K8sPod +------------------------------------------------------------------ + +Defines a pod spec and additional pod metadata that is created when a task is executed. + + + +.. csv-table:: K8sPod type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "metadata", ":ref:`ref_flyteidl.core.K8sObjectMetadata`", "", "Contains additional metadata for building a kubernetes pod." + "pod_spec", ":ref:`ref_google.protobuf.Struct`", "", "Defines the primary pod spec created when a task is executed. This should be a JSON-marshalled pod spec, which can be defined in - go, using: https://github.com/kubernetes/api/blob/release-1.21/core/v1/types.go#L2936 - python: using https://github.com/kubernetes-client/python/blob/release-19.0/kubernetes/client/models/v1_pod_spec.py" + + + + + + + +.. _ref_flyteidl.core.Resources: + +Resources +------------------------------------------------------------------ + +A customizable interface to convey resources requested for a container. 
This can be interpreted differently for different +container engines. + + + +.. csv-table:: Resources type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "requests", ":ref:`ref_flyteidl.core.Resources.ResourceEntry`", "repeated", "The desired set of resources requested. ResourceNames must be unique within the list." + "limits", ":ref:`ref_flyteidl.core.Resources.ResourceEntry`", "repeated", "Defines a set of bounds (e.g. min/max) within which the task can reliably run. ResourceNames must be unique within the list." + + + + + + + +.. _ref_flyteidl.core.Resources.ResourceEntry: + +Resources.ResourceEntry +------------------------------------------------------------------ + +Encapsulates a resource name and value. + + + +.. csv-table:: Resources.ResourceEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "name", ":ref:`ref_flyteidl.core.Resources.ResourceName`", "", "Resource name." + "value", ":ref:`ref_string`", "", "Value must be a valid k8s quantity. See https://github.com/kubernetes/apimachinery/blob/master/pkg/api/resource/quantity.go#L30-L80" + + + + + + + +.. _ref_flyteidl.core.RuntimeMetadata: + +RuntimeMetadata +------------------------------------------------------------------ + +Runtime information. This is loosely defined to allow for extensibility. + + + +.. csv-table:: RuntimeMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "type", ":ref:`ref_flyteidl.core.RuntimeMetadata.RuntimeType`", "", "Type of runtime." + "version", ":ref:`ref_string`", "", "Version of the runtime. All versions should be backward compatible. However, certain cases call for version checks to ensure tighter validation or setting expectations." + "flavor", ":ref:`ref_string`", "", "+optional It can be used to provide extra information about the runtime (e.g. python, golang... etc.)." + + + + + + + +.. 
_ref_flyteidl.core.Sql: + +Sql +------------------------------------------------------------------ + +Sql represents a generic sql workload with a statement and dialect. + + + +.. csv-table:: Sql type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "statement", ":ref:`ref_string`", "", "The actual query to run, the query can have templated parameters. We use Flyte's Golang templating format for Query templating. For example, insert overwrite directory '{{ .rawOutputDataPrefix }}' stored as parquet select * from my_table where ds = '{{ .Inputs.ds }}'" + "dialect", ":ref:`ref_flyteidl.core.Sql.Dialect`", "", "" + + + + + + + +.. _ref_flyteidl.core.TaskMetadata: + +TaskMetadata +------------------------------------------------------------------ + +Task Metadata + + + +.. csv-table:: TaskMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "discoverable", ":ref:`ref_bool`", "", "Indicates whether the system should attempt to lookup this task's output to avoid duplication of work." + "runtime", ":ref:`ref_flyteidl.core.RuntimeMetadata`", "", "Runtime information about the task." + "timeout", ":ref:`ref_google.protobuf.Duration`", "", "The overall timeout of a task including user-triggered retries." + "retries", ":ref:`ref_flyteidl.core.RetryStrategy`", "", "Number of retries per task." + "discovery_version", ":ref:`ref_string`", "", "Indicates a logical version to apply to this task for the purpose of discovery." + "deprecated_error_message", ":ref:`ref_string`", "", "If set, this indicates that this task is deprecated. This will enable owners of tasks to notify consumers of the ending of support for a given task." 
+ "interruptible", ":ref:`ref_bool`", "", "" + "cache_serializable", ":ref:`ref_bool`", "", "Indicates whether the system should attempt to execute discoverable instances in serial to avoid duplicate work" + "generates_deck", ":ref:`ref_bool`", "", "Indicates whether the task will generate a Deck URI when it finishes executing." + "tags", ":ref:`ref_flyteidl.core.TaskMetadata.TagsEntry`", "repeated", "Arbitrary tags that allow users and the platform to store small but arbitrary labels" + + + + + + + +.. _ref_flyteidl.core.TaskMetadata.TagsEntry: + +TaskMetadata.TagsEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: TaskMetadata.TagsEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_flyteidl.core.TaskTemplate: + +TaskTemplate +------------------------------------------------------------------ + +A Task structure that uniquely identifies a task in the system +Tasks are registered as a first step in the system. + + + +.. csv-table:: TaskTemplate type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Auto generated taskId by the system. Task Id uniquely identifies this task globally." + "type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier. This can be used to customize any of the components. If no extensions are provided in the system, Flyte will resolve this task to its TaskCategory and default the implementation registered for the TaskCategory." + "metadata", ":ref:`ref_flyteidl.core.TaskMetadata`", "", "Extra metadata about the task." + "interface", ":ref:`ref_flyteidl.core.TypedInterface`", "", "A strongly typed interface for the task. 
This enables others to use this task within a workflow and guarantees compile-time validation of the workflow to avoid costly runtime failures." + "custom", ":ref:`ref_google.protobuf.Struct`", "", "Custom data about the task. This is extensible to allow various plugins in the system." + "container", ":ref:`ref_flyteidl.core.Container`", "", "" + "k8s_pod", ":ref:`ref_flyteidl.core.K8sPod`", "", "" + "sql", ":ref:`ref_flyteidl.core.Sql`", "", "" + "task_type_version", ":ref:`ref_int32`", "", "This can be used to customize task handling at execution time for the same task type." + "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "security_context encapsulates security attributes requested to run this task." + "config", ":ref:`ref_flyteidl.core.TaskTemplate.ConfigEntry`", "repeated", "Metadata about the custom defined for this task. This is extensible to allow various plugins in the system to use as required. reserve the field numbers 1 through 15 for very frequently occurring message elements" + + + + + + + +.. _ref_flyteidl.core.TaskTemplate.ConfigEntry: + +TaskTemplate.ConfigEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: TaskTemplate.ConfigEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + +.. + end messages + + + +.. _ref_flyteidl.core.Container.Architecture: + +Container.Architecture +------------------------------------------------------------------ + +Architecture-type the container image supports. + +.. csv-table:: Enum Container.Architecture values + :header: "Name", "Number", "Description" + :widths: auto + + "UNKNOWN", "0", "" + "AMD64", "1", "" + "ARM64", "2", "" + "ARM_V6", "3", "" + "ARM_V7", "4", "" + + + +.. 
_ref_flyteidl.core.DataLoadingConfig.LiteralMapFormat: + +DataLoadingConfig.LiteralMapFormat +------------------------------------------------------------------ + +LiteralMapFormat decides the encoding format in which the input metadata should be made available to the containers. +If the user has access to the protocol buffer definitions, it is recommended to use the PROTO format. +JSON and YAML do not need any protobuf definitions to read it +All remote references in core.LiteralMap are replaced with local filesystem references (the data is downloaded to local filesystem) + +.. csv-table:: Enum DataLoadingConfig.LiteralMapFormat values + :header: "Name", "Number", "Description" + :widths: auto + + "JSON", "0", "JSON / YAML for the metadata (which contains inlined primitive values). The representation is inline with the standard json specification as specified - https://www.json.org/json-en.html" + "YAML", "1", "" + "PROTO", "2", "Proto is a serialized binary of `core.LiteralMap` defined in flyteidl/core" + + + +.. _ref_flyteidl.core.IOStrategy.DownloadMode: + +IOStrategy.DownloadMode +------------------------------------------------------------------ + +Mode to use for downloading + +.. csv-table:: Enum IOStrategy.DownloadMode values + :header: "Name", "Number", "Description" + :widths: auto + + "DOWNLOAD_EAGER", "0", "All data will be downloaded before the main container is executed" + "DOWNLOAD_STREAM", "1", "Data will be downloaded as a stream and an End-Of-Stream marker will be written to indicate all data has been downloaded. Refer to protocol for details" + "DO_NOT_DOWNLOAD", "2", "Large objects (offloaded) will not be downloaded" + + + +.. _ref_flyteidl.core.IOStrategy.UploadMode: + +IOStrategy.UploadMode +------------------------------------------------------------------ + +Mode to use for uploading + +.. 
csv-table:: Enum IOStrategy.UploadMode values + :header: "Name", "Number", "Description" + :widths: auto + + "UPLOAD_ON_EXIT", "0", "All data will be uploaded after the main container exits" + "UPLOAD_EAGER", "1", "Data will be uploaded as it appears. Refer to protocol specification for details" + "DO_NOT_UPLOAD", "2", "Data will not be uploaded, only references will be written" + + + +.. _ref_flyteidl.core.Resources.ResourceName: + +Resources.ResourceName +------------------------------------------------------------------ + +Known resource names. + +.. csv-table:: Enum Resources.ResourceName values + :header: "Name", "Number", "Description" + :widths: auto + + "UNKNOWN", "0", "" + "CPU", "1", "" + "GPU", "2", "" + "MEMORY", "3", "" + "STORAGE", "4", "" + "EPHEMERAL_STORAGE", "5", "For Kubernetes-based deployments, pods use ephemeral local storage for scratch space, caching, and for logs." + + + +.. _ref_flyteidl.core.RuntimeMetadata.RuntimeType: + +RuntimeMetadata.RuntimeType +------------------------------------------------------------------ + + + +.. csv-table:: Enum RuntimeMetadata.RuntimeType values + :header: "Name", "Number", "Description" + :widths: auto + + "OTHER", "0", "" + "FLYTE_SDK", "1", "" + + + +.. _ref_flyteidl.core.Sql.Dialect: + +Sql.Dialect +------------------------------------------------------------------ + +The dialect of the SQL statement. This is used to validate and parse SQL statements at compilation time to avoid +expensive runtime operations. If set to an unsupported dialect, no validation will be done on the statement. +We support the following dialect: ansi, hive. + +.. csv-table:: Enum Sql.Dialect values + :header: "Name", "Number", "Description" + :widths: auto + + "UNDEFINED", "0", "" + "ANSI", "1", "" + "HIVE", "2", "" + "OTHER", "3", "" + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. 
_ref_flyteidl/core/types.proto: + +flyteidl/core/types.proto +================================================================== + + + + + +.. _ref_flyteidl.core.BlobType: + +BlobType +------------------------------------------------------------------ + +Defines type behavior for blob objects + + + +.. csv-table:: BlobType type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "format", ":ref:`ref_string`", "", "Format can be a free form string understood by SDK/UI etc like csv, parquet etc" + "dimensionality", ":ref:`ref_flyteidl.core.BlobType.BlobDimensionality`", "", "" + + + + + + + +.. _ref_flyteidl.core.EnumType: + +EnumType +------------------------------------------------------------------ + +Enables declaring enum types, with predefined string values +For len(values) > 0, the first value in the ordered list is regarded as the default value. If you wish +to provide no defaults, make the first value undefined. + + + +.. csv-table:: EnumType type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "values", ":ref:`ref_string`", "repeated", "Predefined set of enum values." + + + + + + + +.. _ref_flyteidl.core.Error: + +Error +------------------------------------------------------------------ + +Represents an error thrown from a node. + + + +.. csv-table:: Error type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "failed_node_id", ":ref:`ref_string`", "", "The node id that threw the error." + "message", ":ref:`ref_string`", "", "Error message thrown." + + + + + + + +.. _ref_flyteidl.core.LiteralType: + +LiteralType +------------------------------------------------------------------ + +Defines a strong type to allow type checking between interfaces. + + + +.. 
csv-table:: LiteralType type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "simple", ":ref:`ref_flyteidl.core.SimpleType`", "", "A simple type that can be compared one-to-one with another." + "schema", ":ref:`ref_flyteidl.core.SchemaType`", "", "A complex type that requires matching of inner fields." + "collection_type", ":ref:`ref_flyteidl.core.LiteralType`", "", "Defines the type of the value of a collection. Only homogeneous collections are allowed." + "map_value_type", ":ref:`ref_flyteidl.core.LiteralType`", "", "Defines the type of the value of a map type. The type of the key is always a string." + "blob", ":ref:`ref_flyteidl.core.BlobType`", "", "A blob might have specialized implementation details depending on associated metadata." + "enum_type", ":ref:`ref_flyteidl.core.EnumType`", "", "Defines an enum with pre-defined string values." + "structured_dataset_type", ":ref:`ref_flyteidl.core.StructuredDatasetType`", "", "Generalized schema support" + "union_type", ":ref:`ref_flyteidl.core.UnionType`", "", "Defines an union type with pre-defined LiteralTypes." + "metadata", ":ref:`ref_google.protobuf.Struct`", "", "This field contains type metadata that is descriptive of the type, but is NOT considered in type-checking. This might be used by consumers to identify special behavior or display extended information for the type." + "annotation", ":ref:`ref_flyteidl.core.TypeAnnotation`", "", "This field contains arbitrary data that might have special semantic meaning for the client but does not affect internal flyte behavior." + "structure", ":ref:`ref_flyteidl.core.TypeStructure`", "", "Hints to improve type matching." + + + + + + + +.. _ref_flyteidl.core.OutputReference: + +OutputReference +------------------------------------------------------------------ + +A reference to an output produced by a node. The type can be retrieved -and validated- from +the underlying interface of the node. + + + +.. 
csv-table:: OutputReference type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "node_id", ":ref:`ref_string`", "", "Node id must exist at the graph layer." + "var", ":ref:`ref_string`", "", "Variable name must refer to an output variable for the node." + + + + + + + +.. _ref_flyteidl.core.SchemaType: + +SchemaType +------------------------------------------------------------------ + +Defines schema columns and types to strongly type-validate schemas interoperability. + + + +.. csv-table:: SchemaType type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "columns", ":ref:`ref_flyteidl.core.SchemaType.SchemaColumn`", "repeated", "A list of ordered columns this schema comprises of." + + + + + + + +.. _ref_flyteidl.core.SchemaType.SchemaColumn: + +SchemaType.SchemaColumn +------------------------------------------------------------------ + + + + + +.. csv-table:: SchemaType.SchemaColumn type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "name", ":ref:`ref_string`", "", "A unique name -within the schema type- for the column" + "type", ":ref:`ref_flyteidl.core.SchemaType.SchemaColumn.SchemaColumnType`", "", "The column type. This allows a limited set of types currently." + + + + + + + +.. _ref_flyteidl.core.StructuredDatasetType: + +StructuredDatasetType +------------------------------------------------------------------ + + + + + +.. csv-table:: StructuredDatasetType type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "columns", ":ref:`ref_flyteidl.core.StructuredDatasetType.DatasetColumn`", "repeated", "A list of ordered columns this schema comprises of." + "format", ":ref:`ref_string`", "", "This is the storage format, the format of the bits at rest parquet, feather, csv, etc. For two types to be compatible, the format will need to be an exact match." 
+ "external_schema_type", ":ref:`ref_string`", "", "This is a string representing the type that the bytes in external_schema_bytes are formatted in. This is an optional field that will not be used for type checking." + "external_schema_bytes", ":ref:`ref_bytes`", "", "The serialized bytes of a third-party schema library like Arrow. This is an optional field that will not be used for type checking." + + + + + + + +.. _ref_flyteidl.core.StructuredDatasetType.DatasetColumn: + +StructuredDatasetType.DatasetColumn +------------------------------------------------------------------ + + + + + +.. csv-table:: StructuredDatasetType.DatasetColumn type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "name", ":ref:`ref_string`", "", "A unique name within the schema type for the column." + "literal_type", ":ref:`ref_flyteidl.core.LiteralType`", "", "The column type." + + + + + + + +.. _ref_flyteidl.core.TypeAnnotation: + +TypeAnnotation +------------------------------------------------------------------ + +TypeAnnotation encapsulates registration time information about a type. This can be used for various control-plane operations. TypeAnnotation will not be available at runtime when a task runs. + + + +.. csv-table:: TypeAnnotation type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "annotations", ":ref:`ref_google.protobuf.Struct`", "", "An arbitrary JSON payload to describe a type." + + + + + + + +.. _ref_flyteidl.core.TypeStructure: + +TypeStructure +------------------------------------------------------------------ + +Hints to improve type matching +e.g. allows distinguishing output from custom type transformers +even if the underlying IDL serialization matches. + + + +.. csv-table:: TypeStructure type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "tag", ":ref:`ref_string`", "", "Must exactly match for types to be castable" + + + + + + + +.. 
_ref_flyteidl.core.UnionType: + +UnionType +------------------------------------------------------------------ + +Defines a tagged union type, also known as a variant (and formally as the sum type). + +A sum type S is defined by a sequence of types (A, B, C, ...), each tagged by a string tag +A value of type S is constructed from a value of any of the variant types. The specific choice of type is recorded by +storing the variant's tag with the literal value and can be examined in runtime. + +Type S is typically written as +S := Apple A | Banana B | Cantaloupe C | ... + +Notably, a nullable (optional) type is a sum type between some type X and the singleton type representing a null-value: +Optional X := X | Null + +See also: https://en.wikipedia.org/wiki/Tagged_union + + + +.. csv-table:: UnionType type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "variants", ":ref:`ref_flyteidl.core.LiteralType`", "repeated", "Predefined set of variants in union." + + + + + + +.. + end messages + + + +.. _ref_flyteidl.core.BlobType.BlobDimensionality: + +BlobType.BlobDimensionality +------------------------------------------------------------------ + + + +.. csv-table:: Enum BlobType.BlobDimensionality values + :header: "Name", "Number", "Description" + :widths: auto + + "SINGLE", "0", "" + "MULTIPART", "1", "" + + + +.. _ref_flyteidl.core.SchemaType.SchemaColumn.SchemaColumnType: + +SchemaType.SchemaColumn.SchemaColumnType +------------------------------------------------------------------ + + + +.. csv-table:: Enum SchemaType.SchemaColumn.SchemaColumnType values + :header: "Name", "Number", "Description" + :widths: auto + + "INTEGER", "0", "" + "FLOAT", "1", "" + "STRING", "2", "" + "BOOLEAN", "3", "" + "DATETIME", "4", "" + "DURATION", "5", "" + + + +.. _ref_flyteidl.core.SimpleType: + +SimpleType +------------------------------------------------------------------ + +Define a set of simple types. + +.. 
csv-table:: Enum SimpleType values + :header: "Name", "Number", "Description" + :widths: auto + + "NONE", "0", "" + "INTEGER", "1", "" + "FLOAT", "2", "" + "STRING", "3", "" + "BOOLEAN", "4", "" + "DATETIME", "5", "" + "DURATION", "6", "" + "BINARY", "7", "" + "ERROR", "8", "" + "STRUCT", "9", "" + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/core/workflow.proto: + +flyteidl/core/workflow.proto +================================================================== + + + + + +.. _ref_flyteidl.core.Alias: + +Alias +------------------------------------------------------------------ + +Links a variable to an alias. + + + +.. csv-table:: Alias type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "var", ":ref:`ref_string`", "", "Must match one of the output variable names on a node." + "alias", ":ref:`ref_string`", "", "A workflow-level unique alias that downstream nodes can refer to in their input." + + + + + + + +.. _ref_flyteidl.core.ApproveCondition: + +ApproveCondition +------------------------------------------------------------------ + +ApproveCondition represents a dependency on an external approval. During execution, this will manifest as a boolean +signal with the provided signal_id. + + + +.. csv-table:: ApproveCondition type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "signal_id", ":ref:`ref_string`", "", "A unique identifier for the requested boolean signal." + + + + + + + +.. _ref_flyteidl.core.BranchNode: + +BranchNode +------------------------------------------------------------------ + +BranchNode is a special node that alters the flow of the workflow graph. It allows the control flow to branch at +runtime based on a series of conditions that get evaluated on various parameters (e.g. inputs, primitives). + + + +.. 
csv-table:: BranchNode type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "if_else", ":ref:`ref_flyteidl.core.IfElseBlock`", "", "+required" + + + + + + + +.. _ref_flyteidl.core.GateNode: + +GateNode +------------------------------------------------------------------ + +GateNode refers to the condition that is required for the gate to successfully complete. + + + +.. csv-table:: GateNode type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "approve", ":ref:`ref_flyteidl.core.ApproveCondition`", "", "ApproveCondition represents a dependency on an external approval provided by a boolean signal." + "signal", ":ref:`ref_flyteidl.core.SignalCondition`", "", "SignalCondition represents a dependency on a signal." + "sleep", ":ref:`ref_flyteidl.core.SleepCondition`", "", "SleepCondition represents a dependency on waiting for the specified duration." + + + + + + + +.. _ref_flyteidl.core.IfBlock: + +IfBlock +------------------------------------------------------------------ + +Defines a condition and the execution unit that should be executed if the condition is satisfied. + + + +.. csv-table:: IfBlock type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "condition", ":ref:`ref_flyteidl.core.BooleanExpression`", "", "" + "then_node", ":ref:`ref_flyteidl.core.Node`", "", "" + + + + + + + +.. _ref_flyteidl.core.IfElseBlock: + +IfElseBlock +------------------------------------------------------------------ + +Defines a series of if/else blocks. The first branch whose condition evaluates to true is the one to execute. +If no conditions were satisfied, the else_node or the error will execute. + + + +.. csv-table:: IfElseBlock type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "case", ":ref:`ref_flyteidl.core.IfBlock`", "", "+required. First condition to evaluate." + "other", ":ref:`ref_flyteidl.core.IfBlock`", "repeated", "+optional. 
Additional branches to evaluate." + "else_node", ":ref:`ref_flyteidl.core.Node`", "", "The node to execute in case none of the branches were taken." + "error", ":ref:`ref_flyteidl.core.Error`", "", "An error to throw in case none of the branches were taken." + + + + + + + +.. _ref_flyteidl.core.Node: + +Node +------------------------------------------------------------------ + +A Workflow graph Node. One unit of execution in the graph. Each node can be linked to a Task, a Workflow or a branch +node. + + + +.. csv-table:: Node type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_string`", "", "A workflow-level unique identifier that identifies this node in the workflow. 'inputs' and 'outputs' are reserved node ids that cannot be used by other nodes." + "metadata", ":ref:`ref_flyteidl.core.NodeMetadata`", "", "Extra metadata about the node." + "inputs", ":ref:`ref_flyteidl.core.Binding`", "repeated", "Specifies how to bind the underlying interface's inputs. All required inputs specified in the underlying interface must be fulfilled." + "upstream_node_ids", ":ref:`ref_string`", "repeated", "+optional Specifies execution dependency for this node ensuring it will only get scheduled to run after all its upstream nodes have completed. This node will have an implicit dependency on any node that appears in inputs field." + "output_aliases", ":ref:`ref_flyteidl.core.Alias`", "repeated", "+optional. A node can define aliases for a subset of its outputs. This is particularly useful if different nodes need to conform to the same interface (e.g. all branches in a branch node). Downstream nodes must refer to this nodes outputs using the alias if one's specified." + "task_node", ":ref:`ref_flyteidl.core.TaskNode`", "", "Information about the Task to execute in this node." + "workflow_node", ":ref:`ref_flyteidl.core.WorkflowNode`", "", "Information about the Workflow to execute in this mode." 
+ "branch_node", ":ref:`ref_flyteidl.core.BranchNode`", "", "Information about the branch node to evaluate in this node." + "gate_node", ":ref:`ref_flyteidl.core.GateNode`", "", "Information about the condition to evaluate in this node." + + + + + + + +.. _ref_flyteidl.core.NodeMetadata: + +NodeMetadata +------------------------------------------------------------------ + +Defines extra information about the Node. + + + +.. csv-table:: NodeMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "name", ":ref:`ref_string`", "", "A friendly name for the Node" + "timeout", ":ref:`ref_google.protobuf.Duration`", "", "The overall timeout of a task." + "retries", ":ref:`ref_flyteidl.core.RetryStrategy`", "", "Number of retries per task." + "interruptible", ":ref:`ref_bool`", "", "" + + + + + + + +.. _ref_flyteidl.core.SignalCondition: + +SignalCondition +------------------------------------------------------------------ + +SignalCondition represents a dependency on a signal. + + + +.. csv-table:: SignalCondition type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "signal_id", ":ref:`ref_string`", "", "A unique identifier for the requested signal." + "type", ":ref:`ref_flyteidl.core.LiteralType`", "", "A type denoting the required value type for this signal." + "output_variable_name", ":ref:`ref_string`", "", "The variable name for the signal value in this node's outputs." + + + + + + + +.. _ref_flyteidl.core.SleepCondition: + +SleepCondition +------------------------------------------------------------------ + +SleepCondition represents a dependency on waiting for the specified duration. + + + +.. csv-table:: SleepCondition type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "duration", ":ref:`ref_google.protobuf.Duration`", "", "The overall duration for this sleep." + + + + + + + +.. 
_ref_flyteidl.core.TaskNode: + +TaskNode +------------------------------------------------------------------ + +Refers to the task that the Node is to execute. + + + +.. csv-table:: TaskNode type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "reference_id", ":ref:`ref_flyteidl.core.Identifier`", "", "A globally unique identifier for the task." + "overrides", ":ref:`ref_flyteidl.core.TaskNodeOverrides`", "", "Optional overrides applied at task execution time." + + + + + + + +.. _ref_flyteidl.core.TaskNodeOverrides: + +TaskNodeOverrides +------------------------------------------------------------------ + +Optional task node overrides that will be applied at task execution time. + + + +.. csv-table:: TaskNodeOverrides type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "resources", ":ref:`ref_flyteidl.core.Resources`", "", "A customizable interface to convey resources requested for a task container." + + + + + + + +.. _ref_flyteidl.core.WorkflowMetadata: + +WorkflowMetadata +------------------------------------------------------------------ + +This is workflow layer metadata. These settings are only applicable to the workflow as a whole, and do not +percolate down to child entities (like tasks) launched by the workflow. + + + +.. csv-table:: WorkflowMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "Indicates the runtime priority of workflow executions." + "on_failure", ":ref:`ref_flyteidl.core.WorkflowMetadata.OnFailurePolicy`", "", "Defines how the system should behave when a failure is detected in the workflow execution." + "tags", ":ref:`ref_flyteidl.core.WorkflowMetadata.TagsEntry`", "repeated", "Arbitrary tags that allow users and the platform to store small but arbitrary labels" + + + + + + + +.. 
_ref_flyteidl.core.WorkflowMetadata.TagsEntry: + +WorkflowMetadata.TagsEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: WorkflowMetadata.TagsEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_flyteidl.core.WorkflowMetadataDefaults: + +WorkflowMetadataDefaults +------------------------------------------------------------------ + +The difference between these settings and the WorkflowMetadata ones is that these are meant to be passed down to +a workflow's underlying entities (like tasks). For instance, 'interruptible' has no meaning at the workflow layer, it +is only relevant when a task executes. The settings here are the defaults that are passed to all nodes +unless explicitly overridden at the node layer. +If you are adding a setting that applies to both the Workflow itself, and everything underneath it, it should be +added to both this object and the WorkflowMetadata object above. + + + +.. csv-table:: WorkflowMetadataDefaults type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "interruptible", ":ref:`ref_bool`", "", "Whether child nodes of the workflow are interruptible." + + + + + + + +.. _ref_flyteidl.core.WorkflowNode: + +WorkflowNode +------------------------------------------------------------------ + +Refers to the workflow the node is to execute. + + + +.. csv-table:: WorkflowNode type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "launchplan_ref", ":ref:`ref_flyteidl.core.Identifier`", "", "A globally unique identifier for the launch plan." + "sub_workflow_ref", ":ref:`ref_flyteidl.core.Identifier`", "", "Reference to a subworkflow, that should be defined with the compiler context" + + + + + + + +.. 
_ref_flyteidl.core.WorkflowTemplate: + +WorkflowTemplate +------------------------------------------------------------------ + +Flyte Workflow Structure that encapsulates task, branch and subworkflow nodes to form a statically analyzable, +directed acyclic graph. + + + +.. csv-table:: WorkflowTemplate type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "A globally unique identifier for the workflow." + "metadata", ":ref:`ref_flyteidl.core.WorkflowMetadata`", "", "Extra metadata about the workflow." + "interface", ":ref:`ref_flyteidl.core.TypedInterface`", "", "Defines a strongly typed interface for the Workflow. This can include some optional parameters." + "nodes", ":ref:`ref_flyteidl.core.Node`", "repeated", "A list of nodes. In addition, 'globals' is a special reserved node id that can be used to consume workflow inputs." + "outputs", ":ref:`ref_flyteidl.core.Binding`", "repeated", "A list of output bindings that specify how to construct workflow outputs. Bindings can pull node outputs or specify literals. All workflow outputs specified in the interface field must be bound in order for the workflow to be validated. A workflow has an implicit dependency on all of its nodes to execute successfully in order to bind final outputs. Most of these outputs will be Binding's with a BindingData of type OutputReference. That is, your workflow can just have an output of some constant (`Output(5)`), but usually, the workflow will be pulling outputs from the output of a task." + "failure_node", ":ref:`ref_flyteidl.core.Node`", "", "+optional A catch-all node. This node is executed whenever the execution engine determines the workflow has failed. The interface of this node must match the Workflow interface with an additional input named 'error' of type pb.lyft.flyte.core.Error." + "metadata_defaults", ":ref:`ref_flyteidl.core.WorkflowMetadataDefaults`", "", "workflow defaults" + + + + + + +.. 
+ end messages + + + +.. _ref_flyteidl.core.WorkflowMetadata.OnFailurePolicy: + +WorkflowMetadata.OnFailurePolicy +------------------------------------------------------------------ + +Failure Handling Strategy + +.. csv-table:: Enum WorkflowMetadata.OnFailurePolicy values + :header: "Name", "Number", "Description" + :widths: auto + + "FAIL_IMMEDIATELY", "0", "FAIL_IMMEDIATELY instructs the system to fail as soon as a node fails in the workflow. It'll automatically abort all currently running nodes and clean up resources before finally marking the workflow executions as failed." + "FAIL_AFTER_EXECUTABLE_NODES_COMPLETE", "1", "FAIL_AFTER_EXECUTABLE_NODES_COMPLETE instructs the system to make as much progress as it can. The system will not alter the dependencies of the execution graph so any node that depend on the failed node will not be run. Other nodes that will be executed to completion before cleaning up resources and marking the workflow execution as failed." + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/core/workflow_closure.proto: + +flyteidl/core/workflow_closure.proto +================================================================== + + + + + +.. _ref_flyteidl.core.WorkflowClosure: + +WorkflowClosure +------------------------------------------------------------------ + +Defines an enclosed package of workflow and tasks it references. + + + +.. csv-table:: WorkflowClosure type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "workflow", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "", "required. Workflow template." + "tasks", ":ref:`ref_flyteidl.core.TaskTemplate`", "repeated", "optional. A collection of tasks referenced by the workflow. Only needed if the workflow references tasks." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. 
_ref_google/protobuf/timestamp.proto: + +google/protobuf/timestamp.proto +================================================================== + + + + + +.. _ref_google.protobuf.Timestamp: + +Timestamp +------------------------------------------------------------------ + +A Timestamp represents a point in time independent of any time zone or local +calendar, encoded as a count of seconds and fractions of seconds at +nanosecond resolution. The count is relative to an epoch at UTC midnight on +January 1, 1970, in the proleptic Gregorian calendar which extends the +Gregorian calendar backwards to year one. + +All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap +second table is needed for interpretation, using a [24-hour linear +smear](https://developers.google.com/time/smear). + +The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By +restricting to that range, we ensure that we can convert to and from [RFC +3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + +# Examples + +Example 1: Compute Timestamp from POSIX `time()`. + + Timestamp timestamp; + timestamp.set_seconds(time(NULL)); + timestamp.set_nanos(0); + +Example 2: Compute Timestamp from POSIX `gettimeofday()`. + + struct timeval tv; + gettimeofday(&tv, NULL); + + Timestamp timestamp; + timestamp.set_seconds(tv.tv_sec); + timestamp.set_nanos(tv.tv_usec * 1000); + +Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + + FILETIME ft; + GetSystemTimeAsFileTime(&ft); + UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + + // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + Timestamp timestamp; + timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + +Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. 
+ + long millis = System.currentTimeMillis(); + + Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + .setNanos((int) ((millis % 1000) * 1000000)).build(); + +Example 5: Compute Timestamp from Java `Instant.now()`. + + Instant now = Instant.now(); + + Timestamp timestamp = + Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + .setNanos(now.getNano()).build(); + +Example 6: Compute Timestamp from current time in Python. + + timestamp = Timestamp() + timestamp.GetCurrentTime() + +# JSON Mapping + +In JSON format, the Timestamp type is encoded as a string in the +[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the +format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" +where {year} is always expressed using four digits while {month}, {day}, +{hour}, {min}, and {sec} are zero-padded to two digits each. The fractional +seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), +are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone +is required. A proto3 JSON serializer should always use UTC (as indicated by +"Z") when printing the Timestamp type and a proto3 JSON parser should be +able to accept both UTC and other timezones (as indicated by an offset). + +For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past +01:30 UTC on January 15, 2017. + +In JavaScript, one can convert a Date object to this format using the +standard +[toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) +method. In Python, a standard `datetime.datetime` object can be converted +to this format using +[`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with +the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. 
Likewise, in Java, one can use +the Joda Time's [`ISODateTimeFormat.dateTime()`]( +http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D +) to obtain a formatter capable of generating timestamps in this format. + + + +.. csv-table:: Timestamp type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "seconds", ":ref:`ref_int64`", "", "Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive." + "nanos", ":ref:`ref_int32`", "", "Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_google/protobuf/duration.proto: + +google/protobuf/duration.proto +================================================================== + + + + + +.. _ref_google.protobuf.Duration: + +Duration +------------------------------------------------------------------ + +A Duration represents a signed, fixed-length span of time represented +as a count of seconds and fractions of seconds at nanosecond +resolution. It is independent of any calendar and concepts like "day" +or "month". It is related to Timestamp in that the difference between +two Timestamp values is a Duration and it can be added or subtracted +from a Timestamp. Range is approximately +-10,000 years. + +# Examples + +Example 1: Compute Duration from two Timestamps in pseudo code. 
+ + Timestamp start = ...; + Timestamp end = ...; + Duration duration = ...; + + duration.seconds = end.seconds - start.seconds; + duration.nanos = end.nanos - start.nanos; + + if (duration.seconds < 0 && duration.nanos > 0) { + duration.seconds += 1; + duration.nanos -= 1000000000; + } else if (duration.seconds > 0 && duration.nanos < 0) { + duration.seconds -= 1; + duration.nanos += 1000000000; + } + +Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + + Timestamp start = ...; + Duration duration = ...; + Timestamp end = ...; + + end.seconds = start.seconds + duration.seconds; + end.nanos = start.nanos + duration.nanos; + + if (end.nanos < 0) { + end.seconds -= 1; + end.nanos += 1000000000; + } else if (end.nanos >= 1000000000) { + end.seconds += 1; + end.nanos -= 1000000000; + } + +Example 3: Compute Duration from datetime.timedelta in Python. + + td = datetime.timedelta(days=3, minutes=10) + duration = Duration() + duration.FromTimedelta(td) + +# JSON Mapping + +In JSON format, the Duration type is encoded as a string rather than an +object, where the string ends in the suffix "s" (indicating seconds) and +is preceded by the number of seconds, with nanoseconds expressed as +fractional seconds. For example, 3 seconds with 0 nanoseconds should be +encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should +be expressed in JSON format as "3.000000001s", and 3 seconds and 1 +microsecond should be expressed in JSON format as "3.000001s". + + + +.. csv-table:: Duration type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years" + "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. 
Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_google/protobuf/struct.proto: + +google/protobuf/struct.proto +================================================================== + + + + + +.. _ref_google.protobuf.ListValue: + +ListValue +------------------------------------------------------------------ + +`ListValue` is a wrapper around a repeated field of values. + +The JSON representation for `ListValue` is JSON array. + + + +.. csv-table:: ListValue type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "values", ":ref:`ref_google.protobuf.Value`", "repeated", "Repeated field of dynamically typed values." + + + + + + + +.. _ref_google.protobuf.Struct: + +Struct +------------------------------------------------------------------ + +`Struct` represents a structured data value, consisting of fields +which map to dynamically typed values. In some languages, `Struct` +might be supported by a native representation. For example, in +scripting languages like JS a struct is represented as an +object. The details of that representation are described together +with the proto support for the language. + +The JSON representation for `Struct` is JSON object. + + + +.. csv-table:: Struct type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "fields", ":ref:`ref_google.protobuf.Struct.FieldsEntry`", "repeated", "Unordered map of dynamically typed values." + + + + + + + +.. _ref_google.protobuf.Struct.FieldsEntry: + +Struct.FieldsEntry +------------------------------------------------------------------ + + + + + +.. 
csv-table:: Struct.FieldsEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_google.protobuf.Value`", "", "" + + + + + + + +.. _ref_google.protobuf.Value: + +Value +------------------------------------------------------------------ + +`Value` represents a dynamically typed value which can be either +null, a number, a string, a boolean, a recursive struct value, or a +list of values. A producer of value is expected to set one of these +variants. Absence of any variant indicates an error. + +The JSON representation for `Value` is JSON value. + + + +.. csv-table:: Value type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "null_value", ":ref:`ref_google.protobuf.NullValue`", "", "Represents a null value." + "number_value", ":ref:`ref_double`", "", "Represents a double value." + "string_value", ":ref:`ref_string`", "", "Represents a string value." + "bool_value", ":ref:`ref_bool`", "", "Represents a boolean value." + "struct_value", ":ref:`ref_google.protobuf.Struct`", "", "Represents a structured value." + "list_value", ":ref:`ref_google.protobuf.ListValue`", "", "Represents a repeated `Value`." + + + + + + +.. + end messages + + + +.. _ref_google.protobuf.NullValue: + +NullValue +------------------------------------------------------------------ + +`NullValue` is a singleton enumeration to represent the null value for the +`Value` type union. + + The JSON representation for `NullValue` is JSON `null`. + +.. csv-table:: Enum NullValue values + :header: "Name", "Number", "Description" + :widths: auto + + "NULL_VALUE", "0", "Null value." + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + +.. _ref_scala_types: + +Scalar Value Types +================== + + + +.. _ref_double: + +double +----------------------------- + + + +.. 
csv-table:: double language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "double", "double", "double", "float", "float64", "double", "float", "Float" + + + +.. _ref_float: + +float +----------------------------- + + + +.. csv-table:: float language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "float", "float", "float", "float", "float32", "float", "float", "Float" + + + +.. _ref_int32: + +int32 +----------------------------- + +Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint32 instead. + +.. csv-table:: int32 language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "int32", "int32", "int", "int", "int32", "int", "integer", "Bignum or Fixnum (as required)" + + + +.. _ref_int64: + +int64 +----------------------------- + +Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint64 instead. + +.. csv-table:: int64 language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "int64", "int64", "long", "int/long", "int64", "long", "integer/string", "Bignum" + + + +.. _ref_uint32: + +uint32 +----------------------------- + +Uses variable-length encoding. + +.. csv-table:: uint32 language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "uint32", "uint32", "int", "int/long", "uint32", "uint", "integer", "Bignum or Fixnum (as required)" + + + +.. _ref_uint64: + +uint64 +----------------------------- + +Uses variable-length encoding. + +.. 
csv-table:: uint64 language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "uint64", "uint64", "long", "int/long", "uint64", "ulong", "integer/string", "Bignum or Fixnum (as required)" + + + +.. _ref_sint32: + +sint32 +----------------------------- + +Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int32s. + +.. csv-table:: sint32 language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "sint32", "int32", "int", "int", "int32", "int", "integer", "Bignum or Fixnum (as required)" + + + +.. _ref_sint64: + +sint64 +----------------------------- + +Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int64s. + +.. csv-table:: sint64 language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "sint64", "int64", "long", "int/long", "int64", "long", "integer/string", "Bignum" + + + +.. _ref_fixed32: + +fixed32 +----------------------------- + +Always four bytes. More efficient than uint32 if values are often greater than 2^28. + +.. csv-table:: fixed32 language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "fixed32", "uint32", "int", "int", "uint32", "uint", "integer", "Bignum or Fixnum (as required)" + + + +.. _ref_fixed64: + +fixed64 +----------------------------- + +Always eight bytes. More efficient than uint64 if values are often greater than 2^56. + +.. csv-table:: fixed64 language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "fixed64", "uint64", "long", "int/long", "uint64", "ulong", "integer/string", "Bignum" + + + +.. _ref_sfixed32: + +sfixed32 +----------------------------- + +Always four bytes. + +.. 
csv-table:: sfixed32 language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "sfixed32", "int32", "int", "int", "int32", "int", "integer", "Bignum or Fixnum (as required)" + + + +.. _ref_sfixed64: + +sfixed64 +----------------------------- + +Always eight bytes. + +.. csv-table:: sfixed64 language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "sfixed64", "int64", "long", "int/long", "int64", "long", "integer/string", "Bignum" + + + +.. _ref_bool: + +bool +----------------------------- + + + +.. csv-table:: bool language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "bool", "bool", "boolean", "boolean", "bool", "bool", "boolean", "TrueClass/FalseClass" + + + +.. _ref_string: + +string +----------------------------- + +A string must always contain UTF-8 encoded or 7-bit ASCII text. + +.. csv-table:: string language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "string", "string", "String", "str/unicode", "string", "string", "string", "String (UTF-8)" + + + +.. _ref_bytes: + +bytes +----------------------------- + +May contain any arbitrary sequence of bytes. + +.. csv-table:: bytes language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "bytes", "string", "ByteString", "str", "[]byte", "ByteString", "string", "String (ASCII-8BIT)" + + +.. 
+ end scalars \ No newline at end of file diff --git a/docs/api/flyteidl/docs/core/index.rst b/docs/api/flyteidl/docs/core/index.rst new file mode 100644 index 0000000000..7d2ce06617 --- /dev/null +++ b/docs/api/flyteidl/docs/core/index.rst @@ -0,0 +1,15 @@ +Core Flyte language specification +================================= + +Protocol buffers provide details about core data +structures like :ref:`workflows `, :ref:`tasks `, :ref:`nodes `, and Literals. They are the specifications +of the various entities in Flyte and the type system. + +`Core raw protos `__ + +.. toctree:: + :maxdepth: 1 + :caption: core + :name: coretoc + + core diff --git a/docs/api/flyteidl/docs/datacatalog/datacatalog.rst b/docs/api/flyteidl/docs/datacatalog/datacatalog.rst new file mode 100644 index 0000000000..6ca4328f95 --- /dev/null +++ b/docs/api/flyteidl/docs/datacatalog/datacatalog.rst @@ -0,0 +1,1313 @@ +###################### +Protocol Documentation +###################### + + + + +.. _ref_flyteidl/datacatalog/datacatalog.proto: + +flyteidl/datacatalog/datacatalog.proto +================================================================== + + + + + +.. _ref_datacatalog.AddTagRequest: + +AddTagRequest +------------------------------------------------------------------ + +Request message for tagging an Artifact. + + + +.. csv-table:: AddTagRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "tag", ":ref:`ref_datacatalog.Tag`", "", "" + + + + + + + +.. _ref_datacatalog.AddTagResponse: + +AddTagResponse +------------------------------------------------------------------ + +Response message for tagging an Artifact. + + + + + + + + +.. _ref_datacatalog.Artifact: + +Artifact +------------------------------------------------------------------ + +Artifact message. It is composed of several string fields. + + + +.. 
csv-table:: Artifact type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_string`", "", "The unique ID of the artifact" + "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "The Dataset that the artifact belongs to" + "data", ":ref:`ref_datacatalog.ArtifactData`", "repeated", "A list of data that is associated with the artifact" + "metadata", ":ref:`ref_datacatalog.Metadata`", "", "Free-form metadata associated with the artifact" + "partitions", ":ref:`ref_datacatalog.Partition`", "repeated", "" + "tags", ":ref:`ref_datacatalog.Tag`", "repeated", "" + "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "creation timestamp of artifact, autogenerated by service" + + + + + + + +.. _ref_datacatalog.ArtifactData: + +ArtifactData +------------------------------------------------------------------ + +ArtifactData that belongs to an artifact + + + +.. csv-table:: ArtifactData type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "name", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_flyteidl.core.Literal`", "", "" + + + + + + + +.. _ref_datacatalog.ArtifactPropertyFilter: + +ArtifactPropertyFilter +------------------------------------------------------------------ + +Artifact properties we can filter by + + + +.. csv-table:: ArtifactPropertyFilter type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "artifact_id", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_datacatalog.CreateArtifactRequest: + +CreateArtifactRequest +------------------------------------------------------------------ + +Request message for creating an Artifact and its associated artifact Data. + + + +.. csv-table:: CreateArtifactRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "artifact", ":ref:`ref_datacatalog.Artifact`", "", "" + + + + + + + +.. 
_ref_datacatalog.CreateArtifactResponse: + +CreateArtifactResponse +------------------------------------------------------------------ + +Response message for creating an Artifact. + + + + + + + + +.. _ref_datacatalog.CreateDatasetRequest: + +CreateDatasetRequest +------------------------------------------------------------------ + +Request message for creating a Dataset. + + + +.. csv-table:: CreateDatasetRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "dataset", ":ref:`ref_datacatalog.Dataset`", "", "" + + + + + + + +.. _ref_datacatalog.CreateDatasetResponse: + +CreateDatasetResponse +------------------------------------------------------------------ + +Response message for creating a Dataset + + + + + + + + +.. _ref_datacatalog.Dataset: + +Dataset +------------------------------------------------------------------ + +Dataset message. It is uniquely identified by DatasetID. + + + +.. csv-table:: Dataset type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_datacatalog.DatasetID`", "", "" + "metadata", ":ref:`ref_datacatalog.Metadata`", "", "" + "partitionKeys", ":ref:`ref_string`", "repeated", "" + + + + + + + +.. _ref_datacatalog.DatasetID: + +DatasetID +------------------------------------------------------------------ + +DatasetID message that is composed of several string fields. + + + +.. csv-table:: DatasetID type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "The name of the project" + "name", ":ref:`ref_string`", "", "The name of the dataset" + "domain", ":ref:`ref_string`", "", "The domain (eg. environment)" + "version", ":ref:`ref_string`", "", "Version of the data schema" + "UUID", ":ref:`ref_string`", "", "UUID for the dataset (if set the above fields are optional)" + + + + + + + +.. 
_ref_datacatalog.DatasetPropertyFilter: + +DatasetPropertyFilter +------------------------------------------------------------------ + +Dataset properties we can filter by + + + +.. csv-table:: DatasetPropertyFilter type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "" + "name", ":ref:`ref_string`", "", "" + "domain", ":ref:`ref_string`", "", "" + "version", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_datacatalog.FilterExpression: + +FilterExpression +------------------------------------------------------------------ + +Filter expression that is composed of a combination of single filters + + + +.. csv-table:: FilterExpression type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "filters", ":ref:`ref_datacatalog.SinglePropertyFilter`", "repeated", "" + + + + + + + +.. _ref_datacatalog.GetArtifactRequest: + +GetArtifactRequest +------------------------------------------------------------------ + +Request message for retrieving an Artifact. Retrieve an artifact based on a query handle that +can be one of artifact_id or tag. The result returned will include the artifact data and metadata +associated with the artifact. + + + +.. csv-table:: GetArtifactRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "" + "artifact_id", ":ref:`ref_string`", "", "" + "tag_name", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_datacatalog.GetArtifactResponse: + +GetArtifactResponse +------------------------------------------------------------------ + +Response message for retrieving an Artifact. The result returned will include the artifact data +and metadata associated with the artifact. + + + +.. csv-table:: GetArtifactResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "artifact", ":ref:`ref_datacatalog.Artifact`", "", "" + + + + + + + +.. 
_ref_datacatalog.GetDatasetRequest: + +GetDatasetRequest +------------------------------------------------------------------ + +Request message for retrieving a Dataset. The Dataset is retrieved by its unique identifier +which is a combination of several fields. + + + +.. csv-table:: GetDatasetRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "" + + + + + + + +.. _ref_datacatalog.GetDatasetResponse: + +GetDatasetResponse +------------------------------------------------------------------ + +Response message for retrieving a Dataset. The response will include the metadata for the +Dataset. + + + +.. csv-table:: GetDatasetResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "dataset", ":ref:`ref_datacatalog.Dataset`", "", "" + + + + + + + +.. _ref_datacatalog.GetOrExtendReservationRequest: + +GetOrExtendReservationRequest +------------------------------------------------------------------ + +Try to acquire or extend an artifact reservation. If an active reservation exists, retrieve that instance. + + + +.. csv-table:: GetOrExtendReservationRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "reservation_id", ":ref:`ref_datacatalog.ReservationID`", "", "" + "owner_id", ":ref:`ref_string`", "", "" + "heartbeat_interval", ":ref:`ref_google.protobuf.Duration`", "", "Requested reservation extension heartbeat interval" + + + + + + + +.. _ref_datacatalog.GetOrExtendReservationResponse: + +GetOrExtendReservationResponse +------------------------------------------------------------------ + +Response including either a newly minted reservation or the existing reservation + + + +.. csv-table:: GetOrExtendReservationResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "reservation", ":ref:`ref_datacatalog.Reservation`", "", "" + + + + + + + +.. 
_ref_datacatalog.KeyValuePair: + +KeyValuePair +------------------------------------------------------------------ + + + + + +.. csv-table:: KeyValuePair type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_datacatalog.ListArtifactsRequest: + +ListArtifactsRequest +------------------------------------------------------------------ + +List the artifacts that belong to the Dataset, optionally filtered using filtered expression. + + + +.. csv-table:: ListArtifactsRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "Use a datasetID for which you want to retrieve the artifacts" + "filter", ":ref:`ref_datacatalog.FilterExpression`", "", "Apply the filter expression to this query" + "pagination", ":ref:`ref_datacatalog.PaginationOptions`", "", "Pagination options to get a page of artifacts" + + + + + + + +.. _ref_datacatalog.ListArtifactsResponse: + +ListArtifactsResponse +------------------------------------------------------------------ + +Response to list artifacts + + + +.. csv-table:: ListArtifactsResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "artifacts", ":ref:`ref_datacatalog.Artifact`", "repeated", "The list of artifacts" + "next_token", ":ref:`ref_string`", "", "Token to use to request the next page, pass this into the next requests PaginationOptions" + + + + + + + +.. _ref_datacatalog.ListDatasetsRequest: + +ListDatasetsRequest +------------------------------------------------------------------ + +List the datasets for the given query + + + +.. 
csv-table:: ListDatasetsRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "filter", ":ref:`ref_datacatalog.FilterExpression`", "", "Apply the filter expression to this query" + "pagination", ":ref:`ref_datacatalog.PaginationOptions`", "", "Pagination options to get a page of datasets" + + + + + + + +.. _ref_datacatalog.ListDatasetsResponse: + +ListDatasetsResponse +------------------------------------------------------------------ + +List the datasets response with token for next pagination + + + +.. csv-table:: ListDatasetsResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "datasets", ":ref:`ref_datacatalog.Dataset`", "repeated", "The list of datasets" + "next_token", ":ref:`ref_string`", "", "Token to use to request the next page, pass this into the next requests PaginationOptions" + + + + + + + +.. _ref_datacatalog.Metadata: + +Metadata +------------------------------------------------------------------ + +Metadata representation for artifacts and datasets + + + +.. csv-table:: Metadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key_map", ":ref:`ref_datacatalog.Metadata.KeyMapEntry`", "repeated", "key map is a dictionary of key/val strings that represent metadata" + + + + + + + +.. _ref_datacatalog.Metadata.KeyMapEntry: + +Metadata.KeyMapEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: Metadata.KeyMapEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_datacatalog.PaginationOptions: + +PaginationOptions +------------------------------------------------------------------ + +Pagination options for making list requests + + + +.. 
csv-table:: PaginationOptions type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "limit", ":ref:`ref_uint32`", "", "the max number of results to return" + "token", ":ref:`ref_string`", "", "the token to pass to fetch the next page" + "sortKey", ":ref:`ref_datacatalog.PaginationOptions.SortKey`", "", "the property that we want to sort the results by" + "sortOrder", ":ref:`ref_datacatalog.PaginationOptions.SortOrder`", "", "the sort order of the results" + + + + + + + +.. _ref_datacatalog.Partition: + +Partition +------------------------------------------------------------------ + +An artifact could have multiple partitions and each partition can have an arbitrary string key/value pair + + + +.. csv-table:: Partition type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_datacatalog.PartitionPropertyFilter: + +PartitionPropertyFilter +------------------------------------------------------------------ + +Partition properties we can filter by + + + +.. csv-table:: PartitionPropertyFilter type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key_val", ":ref:`ref_datacatalog.KeyValuePair`", "", "" + + + + + + + +.. _ref_datacatalog.ReleaseReservationRequest: + +ReleaseReservationRequest +------------------------------------------------------------------ + +Request to release reservation + + + +.. csv-table:: ReleaseReservationRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "reservation_id", ":ref:`ref_datacatalog.ReservationID`", "", "" + "owner_id", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_datacatalog.ReleaseReservationResponse: + +ReleaseReservationResponse +------------------------------------------------------------------ + +Response to release reservation + + + + + + + + +.. 
_ref_datacatalog.Reservation: + +Reservation +------------------------------------------------------------------ + +A reservation including owner, heartbeat interval, expiration timestamp, and various metadata. + + + +.. csv-table:: Reservation type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "reservation_id", ":ref:`ref_datacatalog.ReservationID`", "", "" + "owner_id", ":ref:`ref_string`", "", "" + "heartbeat_interval", ":ref:`ref_google.protobuf.Duration`", "", "Recommended heartbeat interval to extend reservation" + "expires_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Expiration timestamp of this reservation" + "metadata", ":ref:`ref_datacatalog.Metadata`", "", "" + + + + + + + +.. _ref_datacatalog.ReservationID: + +ReservationID +------------------------------------------------------------------ + +ReservationID message that is composed of several string fields. + + + +.. csv-table:: ReservationID type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "dataset_id", ":ref:`ref_datacatalog.DatasetID`", "", "" + "tag_name", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_datacatalog.SinglePropertyFilter: + +SinglePropertyFilter +------------------------------------------------------------------ + +A single property to filter on. + + + +.. csv-table:: SinglePropertyFilter type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "tag_filter", ":ref:`ref_datacatalog.TagPropertyFilter`", "", "" + "partition_filter", ":ref:`ref_datacatalog.PartitionPropertyFilter`", "", "" + "artifact_filter", ":ref:`ref_datacatalog.ArtifactPropertyFilter`", "", "" + "dataset_filter", ":ref:`ref_datacatalog.DatasetPropertyFilter`", "", "" + "operator", ":ref:`ref_datacatalog.SinglePropertyFilter.ComparisonOperator`", "", "field 10 in case we add more entities to query" + + + + + + + +.. 
_ref_datacatalog.Tag: + +Tag +------------------------------------------------------------------ + +Tag message that is unique to a Dataset. It is associated with a single artifact and +can be retrieved by name later. + + + +.. csv-table:: Tag type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "name", ":ref:`ref_string`", "", "Name of tag" + "artifact_id", ":ref:`ref_string`", "", "The tagged artifact" + "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "The Dataset that this tag belongs to" + + + + + + + +.. _ref_datacatalog.TagPropertyFilter: + +TagPropertyFilter +------------------------------------------------------------------ + +Tag properties we can filter by + + + +.. csv-table:: TagPropertyFilter type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "tag_name", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_datacatalog.UpdateArtifactRequest: + +UpdateArtifactRequest +------------------------------------------------------------------ + +Request message for updating an Artifact and overwriting its associated ArtifactData. + + + +.. csv-table:: UpdateArtifactRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "ID of dataset the artifact is associated with" + "artifact_id", ":ref:`ref_string`", "", "" + "tag_name", ":ref:`ref_string`", "", "" + "data", ":ref:`ref_datacatalog.ArtifactData`", "repeated", "List of data to overwrite stored artifact data with. Must contain ALL data for updated Artifact as any missing ArtifactData entries will be removed from the underlying blob storage and database." + + + + + + + +.. _ref_datacatalog.UpdateArtifactResponse: + +UpdateArtifactResponse +------------------------------------------------------------------ + +Response message for updating an Artifact. + + + +.. 
csv-table:: UpdateArtifactResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "artifact_id", ":ref:`ref_string`", "", "The unique ID of the artifact updated" + + + + + + +.. + end messages + + + +.. _ref_datacatalog.PaginationOptions.SortKey: + +PaginationOptions.SortKey +------------------------------------------------------------------ + + + +.. csv-table:: Enum PaginationOptions.SortKey values + :header: "Name", "Number", "Description" + :widths: auto + + "CREATION_TIME", "0", "" + + + +.. _ref_datacatalog.PaginationOptions.SortOrder: + +PaginationOptions.SortOrder +------------------------------------------------------------------ + + + +.. csv-table:: Enum PaginationOptions.SortOrder values + :header: "Name", "Number", "Description" + :widths: auto + + "DESCENDING", "0", "" + "ASCENDING", "1", "" + + + +.. _ref_datacatalog.SinglePropertyFilter.ComparisonOperator: + +SinglePropertyFilter.ComparisonOperator +------------------------------------------------------------------ + +as use-cases come up we can add more operators, ex: gte, like, not eq etc. + +.. csv-table:: Enum SinglePropertyFilter.ComparisonOperator values + :header: "Name", "Number", "Description" + :widths: auto + + "EQUALS", "0", "" + + +.. + end enums + + +.. + end HasExtensions + + + +.. _ref_datacatalog.DataCatalog: + +DataCatalog +------------------------------------------------------------------ + +Data Catalog service definition +Data Catalog is a service for indexing parameterized, strongly-typed data artifacts across revisions. +Artifacts are associated with a Dataset, and can be tagged for retrieval. + +.. csv-table:: DataCatalog service methods + :header: "Method Name", "Request Type", "Response Type", "Description" + :widths: auto + + "CreateDataset", ":ref:`ref_datacatalog.CreateDatasetRequest`", ":ref:`ref_datacatalog.CreateDatasetResponse`", "Create a new Dataset. Datasets are unique based on the DatasetID. 
Datasets are logical groupings of artifacts. Each dataset can have one or more artifacts" + "GetDataset", ":ref:`ref_datacatalog.GetDatasetRequest`", ":ref:`ref_datacatalog.GetDatasetResponse`", "Get a Dataset by the DatasetID. This returns the Dataset with the associated metadata." + "CreateArtifact", ":ref:`ref_datacatalog.CreateArtifactRequest`", ":ref:`ref_datacatalog.CreateArtifactResponse`", "Create an artifact and the artifact data associated with it. An artifact can be a hive partition or arbitrary files or data values" + "GetArtifact", ":ref:`ref_datacatalog.GetArtifactRequest`", ":ref:`ref_datacatalog.GetArtifactResponse`", "Retrieve an artifact by an identifying handle. This returns an artifact along with the artifact data." + "AddTag", ":ref:`ref_datacatalog.AddTagRequest`", ":ref:`ref_datacatalog.AddTagResponse`", "Associate a tag with an artifact. Tags are unique within a Dataset." + "ListArtifacts", ":ref:`ref_datacatalog.ListArtifactsRequest`", ":ref:`ref_datacatalog.ListArtifactsResponse`", "Return a paginated list of artifacts" + "ListDatasets", ":ref:`ref_datacatalog.ListDatasetsRequest`", ":ref:`ref_datacatalog.ListDatasetsResponse`", "Return a paginated list of datasets" + "UpdateArtifact", ":ref:`ref_datacatalog.UpdateArtifactRequest`", ":ref:`ref_datacatalog.UpdateArtifactResponse`", "Updates an existing artifact, overwriting the stored artifact data in the underlying blob storage." + "GetOrExtendReservation", ":ref:`ref_datacatalog.GetOrExtendReservationRequest`", ":ref:`ref_datacatalog.GetOrExtendReservationResponse`", "Attempts to get or extend a reservation for the corresponding artifact. If one already exists (ie. another entity owns the reservation) then that reservation is retrieved. Once you acquire a reservation, you need to periodically extend the reservation with an identical call. If the reservation is not extended before the defined expiration, it may be acquired by another task. 
Note: We may have multiple concurrent tasks with the same signature and the same input that try to populate the same artifact at the same time. Thus with reservation, only one task can run at a time, until the reservation expires. Note: If task A does not extend the reservation in time and the reservation expires, another task B may take over the reservation, resulting in two tasks A and B running in parallel. So a third task C may get the Artifact from A or B, whichever writes last." + "ReleaseReservation", ":ref:`ref_datacatalog.ReleaseReservationRequest`", ":ref:`ref_datacatalog.ReleaseReservationResponse`", "Release the reservation when the task holding the spot fails so that the other tasks can grab the spot." + +.. + end services + + + + +.. _ref_google/protobuf/timestamp.proto: + +google/protobuf/timestamp.proto +================================================================== + + + + + +.. _ref_google.protobuf.Timestamp: + +Timestamp +------------------------------------------------------------------ + +A Timestamp represents a point in time independent of any time zone or local +calendar, encoded as a count of seconds and fractions of seconds at +nanosecond resolution. The count is relative to an epoch at UTC midnight on +January 1, 1970, in the proleptic Gregorian calendar which extends the +Gregorian calendar backwards to year one. + +All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap +second table is needed for interpretation, using a [24-hour linear +smear](https://developers.google.com/time/smear). + +The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By +restricting to that range, we ensure that we can convert to and from [RFC +3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + +# Examples + +Example 1: Compute Timestamp from POSIX `time()`. + + Timestamp timestamp; + timestamp.set_seconds(time(NULL)); + timestamp.set_nanos(0); + +Example 2: Compute Timestamp from POSIX `gettimeofday()`. 
+ + struct timeval tv; + gettimeofday(&tv, NULL); + + Timestamp timestamp; + timestamp.set_seconds(tv.tv_sec); + timestamp.set_nanos(tv.tv_usec * 1000); + +Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + + FILETIME ft; + GetSystemTimeAsFileTime(&ft); + UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + + // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + Timestamp timestamp; + timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + +Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + + long millis = System.currentTimeMillis(); + + Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + .setNanos((int) ((millis % 1000) * 1000000)).build(); + +Example 5: Compute Timestamp from Java `Instant.now()`. + + Instant now = Instant.now(); + + Timestamp timestamp = + Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + .setNanos(now.getNano()).build(); + +Example 6: Compute Timestamp from current time in Python. + + timestamp = Timestamp() + timestamp.GetCurrentTime() + +# JSON Mapping + +In JSON format, the Timestamp type is encoded as a string in the +[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the +format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" +where {year} is always expressed using four digits while {month}, {day}, +{hour}, {min}, and {sec} are zero-padded to two digits each. The fractional +seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), +are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone +is required. A proto3 JSON serializer should always use UTC (as indicated by +"Z") when printing the Timestamp type and a proto3 JSON parser should be +able to accept both UTC and other timezones (as indicated by an offset). 
+ +For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past +01:30 UTC on January 15, 2017. + +In JavaScript, one can convert a Date object to this format using the +standard +[toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) +method. In Python, a standard `datetime.datetime` object can be converted +to this format using +[`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with +the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use +the Joda Time's [`ISODateTimeFormat.dateTime()`]( +http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D +) to obtain a formatter capable of generating timestamps in this format. + + + +.. csv-table:: Timestamp type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "seconds", ":ref:`ref_int64`", "", "Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive." + "nanos", ":ref:`ref_int32`", "", "Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_google/protobuf/duration.proto: + +google/protobuf/duration.proto +================================================================== + + + + + +.. _ref_google.protobuf.Duration: + +Duration +------------------------------------------------------------------ + +A Duration represents a signed, fixed-length span of time represented +as a count of seconds and fractions of seconds at nanosecond +resolution. It is independent of any calendar and concepts like "day" +or "month". 
It is related to Timestamp in that the difference between +two Timestamp values is a Duration and it can be added or subtracted +from a Timestamp. Range is approximately +-10,000 years. + +# Examples + +Example 1: Compute Duration from two Timestamps in pseudo code. + + Timestamp start = ...; + Timestamp end = ...; + Duration duration = ...; + + duration.seconds = end.seconds - start.seconds; + duration.nanos = end.nanos - start.nanos; + + if (duration.seconds < 0 && duration.nanos > 0) { + duration.seconds += 1; + duration.nanos -= 1000000000; + } else if (duration.seconds > 0 && duration.nanos < 0) { + duration.seconds -= 1; + duration.nanos += 1000000000; + } + +Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + + Timestamp start = ...; + Duration duration = ...; + Timestamp end = ...; + + end.seconds = start.seconds + duration.seconds; + end.nanos = start.nanos + duration.nanos; + + if (end.nanos < 0) { + end.seconds -= 1; + end.nanos += 1000000000; + } else if (end.nanos >= 1000000000) { + end.seconds += 1; + end.nanos -= 1000000000; + } + +Example 3: Compute Duration from datetime.timedelta in Python. + + td = datetime.timedelta(days=3, minutes=10) + duration = Duration() + duration.FromTimedelta(td) + +# JSON Mapping + +In JSON format, the Duration type is encoded as a string rather than an +object, where the string ends in the suffix "s" (indicating seconds) and +is preceded by the number of seconds, with nanoseconds expressed as +fractional seconds. For example, 3 seconds with 0 nanoseconds should be +encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should +be expressed in JSON format as "3.000000001s", and 3 seconds and 1 +microsecond should be expressed in JSON format as "3.000001s". + + + +.. csv-table:: Duration type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. 
Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years" + "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_google/protobuf/struct.proto: + +google/protobuf/struct.proto +================================================================== + + + + + +.. _ref_google.protobuf.ListValue: + +ListValue +------------------------------------------------------------------ + +`ListValue` is a wrapper around a repeated field of values. + +The JSON representation for `ListValue` is JSON array. + + + +.. csv-table:: ListValue type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "values", ":ref:`ref_google.protobuf.Value`", "repeated", "Repeated field of dynamically typed values." + + + + + + + +.. _ref_google.protobuf.Struct: + +Struct +------------------------------------------------------------------ + +`Struct` represents a structured data value, consisting of fields +which map to dynamically typed values. In some languages, `Struct` +might be supported by a native representation. For example, in +scripting languages like JS a struct is represented as an +object. The details of that representation are described together +with the proto support for the language. + +The JSON representation for `Struct` is JSON object. + + + +.. 
csv-table:: Struct type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "fields", ":ref:`ref_google.protobuf.Struct.FieldsEntry`", "repeated", "Unordered map of dynamically typed values." + + + + + + + +.. _ref_google.protobuf.Struct.FieldsEntry: + +Struct.FieldsEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: Struct.FieldsEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_google.protobuf.Value`", "", "" + + + + + + + +.. _ref_google.protobuf.Value: + +Value +------------------------------------------------------------------ + +`Value` represents a dynamically typed value which can be either +null, a number, a string, a boolean, a recursive struct value, or a +list of values. A producer of value is expected to set one of these +variants. Absence of any variant indicates an error. + +The JSON representation for `Value` is JSON value. + + + +.. csv-table:: Value type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "null_value", ":ref:`ref_google.protobuf.NullValue`", "", "Represents a null value." + "number_value", ":ref:`ref_double`", "", "Represents a double value." + "string_value", ":ref:`ref_string`", "", "Represents a string value." + "bool_value", ":ref:`ref_bool`", "", "Represents a boolean value." + "struct_value", ":ref:`ref_google.protobuf.Struct`", "", "Represents a structured value." + "list_value", ":ref:`ref_google.protobuf.ListValue`", "", "Represents a repeated `Value`." + + + + + + +.. + end messages + + + +.. _ref_google.protobuf.NullValue: + +NullValue +------------------------------------------------------------------ + +`NullValue` is a singleton enumeration to represent the null value for the +`Value` type union. + + The JSON representation for `NullValue` is JSON `null`. + +.. 
csv-table:: Enum NullValue values + :header: "Name", "Number", "Description" + :widths: auto + + "NULL_VALUE", "0", "Null value." + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + diff --git a/docs/api/flyteidl/docs/datacatalog/index.rst b/docs/api/flyteidl/docs/datacatalog/index.rst new file mode 100644 index 0000000000..d64c2ddd9f --- /dev/null +++ b/docs/api/flyteidl/docs/datacatalog/index.rst @@ -0,0 +1,16 @@ +Flyte Data Catalog Service +============================ + +Protos provides the interface definition for the Data Catalog Service. Data Catalog is a service to +index parameterized, strongly-typed data artifacts across revisions. It is used in the Flyte ecosystem +to catalog artifacts generated by the task executions. The output generated by a task can be stored as artifact +data and tagged by the user so as to be retrieved later by that tag. + +`Datacatalog raw proto `__ + +.. toctree:: + :maxdepth: 1 + :caption: datacatalog + :name: datacatalogtoc + + datacatalog diff --git a/docs/api/flyteidl/docs/event/event.rst b/docs/api/flyteidl/docs/event/event.rst new file mode 100644 index 0000000000..df0a3b2e8b --- /dev/null +++ b/docs/api/flyteidl/docs/event/event.rst @@ -0,0 +1,726 @@ +###################### +Protocol Documentation +###################### + + + + +.. _ref_flyteidl/event/event.proto: + +flyteidl/event/event.proto +================================================================== + + + + + +.. _ref_flyteidl.event.DynamicWorkflowNodeMetadata: + +DynamicWorkflowNodeMetadata +------------------------------------------------------------------ + +For dynamic workflow nodes we send information about the dynamic workflow definition that gets generated. + + + +.. csv-table:: DynamicWorkflowNodeMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow." 
+ "compiled_workflow", ":ref:`ref_flyteidl.core.CompiledWorkflowClosure`", "", "Represents the compiled representation of the embedded dynamic workflow." + + + + + + + +.. _ref_flyteidl.event.ExternalResourceInfo: + +ExternalResourceInfo +------------------------------------------------------------------ + +This message contains metadata about external resources produced or used by a specific task execution. + + + +.. csv-table:: ExternalResourceInfo type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "external_id", ":ref:`ref_string`", "", "Identifier for an external resource created by this task execution, for example Qubole query ID or presto query ids." + "index", ":ref:`ref_uint32`", "", "A unique index for the external resource with respect to all external resources for this task. Although the identifier may change between task reporting events or retries, this will remain the same to enable aggregating information from multiple reports." + "retry_attempt", ":ref:`ref_uint32`", "", "Retry attempt number for this external resource, ie., 2 for the second attempt" + "phase", ":ref:`ref_flyteidl.core.TaskExecution.Phase`", "", "Phase associated with the external resource" + "cache_status", ":ref:`ref_flyteidl.core.CatalogCacheStatus`", "", "Captures the status of caching for this external resource execution." + "logs", ":ref:`ref_flyteidl.core.TaskLog`", "repeated", "log information for the external resource execution" + + + + + + + +.. _ref_flyteidl.event.NodeExecutionEvent: + +NodeExecutionEvent +------------------------------------------------------------------ + + + + + +.. 
csv-table:: NodeExecutionEvent type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Unique identifier for this node execution" + "producer_id", ":ref:`ref_string`", "", "the id of the originator (Propeller) of the event" + "phase", ":ref:`ref_flyteidl.core.NodeExecution.Phase`", "", "" + "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the original event occurred, it is generated by the executor of the node." + "input_uri", ":ref:`ref_string`", "", "" + "output_uri", ":ref:`ref_string`", "", "URL to the output of the execution, it encodes all the information including Cloud source provider. ie., s3://..." + "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the execution" + "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Raw output data produced by this node execution." + "workflow_node_metadata", ":ref:`ref_flyteidl.event.WorkflowNodeMetadata`", "", "" + "task_node_metadata", ":ref:`ref_flyteidl.event.TaskNodeMetadata`", "", "" + "parent_task_metadata", ":ref:`ref_flyteidl.event.ParentTaskExecutionMetadata`", "", "[To be deprecated] Specifies which task (if any) launched this node." + "parent_node_metadata", ":ref:`ref_flyteidl.event.ParentNodeExecutionMetadata`", "", "Specifies the parent node of the current node execution. Node executions at level zero will not have a parent node." + "retry_group", ":ref:`ref_string`", "", "Retry group to indicate grouping of nodes by retries" + "spec_node_id", ":ref:`ref_string`", "", "Identifier of the node in the original workflow/graph This maps to value of WorkflowTemplate.nodes[X].id" + "node_name", ":ref:`ref_string`", "", "Friendly readable name for the node" + "event_version", ":ref:`ref_int32`", "", "" + "is_parent", ":ref:`ref_bool`", "", "Whether this node launched a subworkflow." 
+ "is_dynamic", ":ref:`ref_bool`", "", "Whether this node yielded a dynamic workflow." + "deck_uri", ":ref:`ref_string`", "", "String location uniquely identifying where the deck HTML file is NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar)" + + + + + + + +.. _ref_flyteidl.event.ParentNodeExecutionMetadata: + +ParentNodeExecutionMetadata +------------------------------------------------------------------ + + + + + +.. csv-table:: ParentNodeExecutionMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "node_id", ":ref:`ref_string`", "", "Unique identifier of the parent node id within the execution This is value of core.NodeExecutionIdentifier.node_id of the parent node" + + + + + + + +.. _ref_flyteidl.event.ParentTaskExecutionMetadata: + +ParentTaskExecutionMetadata +------------------------------------------------------------------ + + + + + +.. csv-table:: ParentTaskExecutionMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "" + + + + + + + +.. _ref_flyteidl.event.ResourcePoolInfo: + +ResourcePoolInfo +------------------------------------------------------------------ + +This message holds task execution metadata specific to resource allocation used to manage concurrent +executions for a project namespace. + + + +.. csv-table:: ResourcePoolInfo type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "allocation_token", ":ref:`ref_string`", "", "Unique resource ID used to identify this execution when allocating a token." + "namespace", ":ref:`ref_string`", "", "Namespace under which this task execution requested an allocation token." + + + + + + + +.. _ref_flyteidl.event.TaskExecutionEvent: + +TaskExecutionEvent +------------------------------------------------------------------ + +Plugin specific execution event information. 
For tasks like Python, Hive, Spark, DynamicJob. + + + +.. csv-table:: TaskExecutionEvent type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "task_id", ":ref:`ref_flyteidl.core.Identifier`", "", "ID of the task. In combination with the retryAttempt this will indicate the task execution uniquely for a given parent node execution." + "parent_node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "A task execution is always kicked off by a node execution, the event consumer will use the parent_id to relate the task to its parent node execution" + "retry_attempt", ":ref:`ref_uint32`", "", "retry attempt number for this task, ie., 2 for the second attempt" + "phase", ":ref:`ref_flyteidl.core.TaskExecution.Phase`", "", "Phase associated with the event" + "producer_id", ":ref:`ref_string`", "", "id of the process that sent this event, mainly for trace debugging" + "logs", ":ref:`ref_flyteidl.core.TaskLog`", "repeated", "log information for the task execution" + "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the original event occurred, it is generated by the executor of the task." + "input_uri", ":ref:`ref_string`", "", "URI of the input file, it encodes all the information including Cloud source provider. ie., s3://..." + "output_uri", ":ref:`ref_string`", "", "URI to the output of the execution, it will be in a format that encodes all the information including Cloud source provider. ie., s3://..." + "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the execution" + "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Raw output data produced by this task execution." + "custom_info", ":ref:`ref_google.protobuf.Struct`", "", "Custom data that the task plugin sends back. This is extensible to allow various plugins in the system."
+ "phase_version", ":ref:`ref_uint32`", "", "Some phases, like RUNNING, can send multiple events with changed metadata (new logs, additional custom_info, etc) that should be recorded regardless of the lack of phase change. The version field should be incremented when metadata changes across the duration of an individual phase." + "reason", ":ref:`ref_string`", "", "An optional explanation for the phase transition." + "task_type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier. If the task definition is already registered in flyte admin this type will be identical, but not all task executions necessarily use pre-registered definitions and this type is useful to render the task in the UI, filter task executions, etc." + "metadata", ":ref:`ref_flyteidl.event.TaskExecutionMetadata`", "", "Metadata around how a task was executed." + "event_version", ":ref:`ref_int32`", "", "The event version is used to indicate versioned changes in how data is reported using this proto message. For example, event_version > 0 means that map tasks report logs using the TaskExecutionMetadata ExternalResourceInfo fields for each subtask rather than the TaskLog in this message." + + + + + + + +.. _ref_flyteidl.event.TaskExecutionMetadata: + +TaskExecutionMetadata +------------------------------------------------------------------ + +Holds metadata around how a task was executed. +As a task transitions across event phases during execution some attributes, such as its generated name, generated external resources, +and more may grow in size but not change necessarily based on the phase transition that sparked the event update. +Metadata is a container for these attributes across the task execution lifecycle. + + + +.. csv-table:: TaskExecutionMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "generated_name", ":ref:`ref_string`", "", "Unique, generated name for this task execution used by the backend."
+ "external_resources", ":ref:`ref_flyteidl.event.ExternalResourceInfo`", "repeated", "Additional data on external resources on other back-ends or platforms (e.g. Hive, Qubole, etc) launched by this task execution." + "resource_pool_info", ":ref:`ref_flyteidl.event.ResourcePoolInfo`", "repeated", "Includes additional data on concurrent resource management used during execution. This is a repeated field because a plugin can request multiple resource allocations during execution." + "plugin_identifier", ":ref:`ref_string`", "", "The identifier of the plugin used to execute this task." + "instance_class", ":ref:`ref_flyteidl.event.TaskExecutionMetadata.InstanceClass`", "", "" + + + + + + + +.. _ref_flyteidl.event.TaskNodeMetadata: + +TaskNodeMetadata +------------------------------------------------------------------ + + + + + +.. csv-table:: TaskNodeMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "cache_status", ":ref:`ref_flyteidl.core.CatalogCacheStatus`", "", "Captures the status of caching for this execution." + "catalog_key", ":ref:`ref_flyteidl.core.CatalogMetadata`", "", "This structure carries the catalog artifact information" + "reservation_status", ":ref:`ref_flyteidl.core.CatalogReservation.Status`", "", "Captures the status of cache reservations for this execution." + "checkpoint_uri", ":ref:`ref_string`", "", "The latest checkpoint location" + "dynamic_workflow", ":ref:`ref_flyteidl.event.DynamicWorkflowNodeMetadata`", "", "In the case this task launched a dynamic workflow we capture its structure here." + + + + + + + +.. _ref_flyteidl.event.WorkflowExecutionEvent: + +WorkflowExecutionEvent +------------------------------------------------------------------ + + + + + +.. 
csv-table:: WorkflowExecutionEvent type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Workflow execution id" + "producer_id", ":ref:`ref_string`", "", "the id of the originator (Propeller) of the event" + "phase", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "", "" + "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the original event occurred, it is generated by the executor of the workflow." + "output_uri", ":ref:`ref_string`", "", "URL to the output of the execution, it encodes all the information including Cloud source provider. ie., s3://..." + "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the execution" + "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Raw output data produced by this workflow execution." + + + + + + + +.. _ref_flyteidl.event.WorkflowNodeMetadata: + +WorkflowNodeMetadata +------------------------------------------------------------------ + +For Workflow Nodes we need to send information about the workflow that's launched + + + +.. csv-table:: WorkflowNodeMetadata type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "" + + + + + + +.. + end messages + + + +.. _ref_flyteidl.event.TaskExecutionMetadata.InstanceClass: + +TaskExecutionMetadata.InstanceClass +------------------------------------------------------------------ + +Includes the broad category of machine used for this specific task execution. + +.. csv-table:: Enum TaskExecutionMetadata.InstanceClass values + :header: "Name", "Number", "Description" + :widths: auto + + "DEFAULT", "0", "The default instance class configured for the flyte application platform." + "INTERRUPTIBLE", "1", "The instance class configured for interruptible tasks." + + +.. + end enums + + +.. 
+ end HasExtensions + + +.. + end services + + + + +.. _ref_google/protobuf/timestamp.proto: + +google/protobuf/timestamp.proto +================================================================== + + + + + +.. _ref_google.protobuf.Timestamp: + +Timestamp +------------------------------------------------------------------ + +A Timestamp represents a point in time independent of any time zone or local +calendar, encoded as a count of seconds and fractions of seconds at +nanosecond resolution. The count is relative to an epoch at UTC midnight on +January 1, 1970, in the proleptic Gregorian calendar which extends the +Gregorian calendar backwards to year one. + +All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap +second table is needed for interpretation, using a [24-hour linear +smear](https://developers.google.com/time/smear). + +The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By +restricting to that range, we ensure that we can convert to and from [RFC +3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + +# Examples + +Example 1: Compute Timestamp from POSIX `time()`. + + Timestamp timestamp; + timestamp.set_seconds(time(NULL)); + timestamp.set_nanos(0); + +Example 2: Compute Timestamp from POSIX `gettimeofday()`. + + struct timeval tv; + gettimeofday(&tv, NULL); + + Timestamp timestamp; + timestamp.set_seconds(tv.tv_sec); + timestamp.set_nanos(tv.tv_usec * 1000); + +Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + + FILETIME ft; + GetSystemTimeAsFileTime(&ft); + UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + + // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. 
+ Timestamp timestamp; + timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + +Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + + long millis = System.currentTimeMillis(); + + Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + .setNanos((int) ((millis % 1000) * 1000000)).build(); + +Example 5: Compute Timestamp from Java `Instant.now()`. + + Instant now = Instant.now(); + + Timestamp timestamp = + Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + .setNanos(now.getNano()).build(); + +Example 6: Compute Timestamp from current time in Python. + + timestamp = Timestamp() + timestamp.GetCurrentTime() + +# JSON Mapping + +In JSON format, the Timestamp type is encoded as a string in the +[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the +format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" +where {year} is always expressed using four digits while {month}, {day}, +{hour}, {min}, and {sec} are zero-padded to two digits each. The fractional +seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), +are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone +is required. A proto3 JSON serializer should always use UTC (as indicated by +"Z") when printing the Timestamp type and a proto3 JSON parser should be +able to accept both UTC and other timezones (as indicated by an offset). + +For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past +01:30 UTC on January 15, 2017. + +In JavaScript, one can convert a Date object to this format using the +standard +[toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) +method. 
In Python, a standard `datetime.datetime` object can be converted +to this format using +[`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with +the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use +the Joda Time's [`ISODateTimeFormat.dateTime()`]( +http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D +) to obtain a formatter capable of generating timestamps in this format. + + + +.. csv-table:: Timestamp type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "seconds", ":ref:`ref_int64`", "", "Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive." + "nanos", ":ref:`ref_int32`", "", "Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_google/protobuf/duration.proto: + +google/protobuf/duration.proto +================================================================== + + + + + +.. _ref_google.protobuf.Duration: + +Duration +------------------------------------------------------------------ + +A Duration represents a signed, fixed-length span of time represented +as a count of seconds and fractions of seconds at nanosecond +resolution. It is independent of any calendar and concepts like "day" +or "month". It is related to Timestamp in that the difference between +two Timestamp values is a Duration and it can be added or subtracted +from a Timestamp. Range is approximately +-10,000 years. + +# Examples + +Example 1: Compute Duration from two Timestamps in pseudo code. 
+ + Timestamp start = ...; + Timestamp end = ...; + Duration duration = ...; + + duration.seconds = end.seconds - start.seconds; + duration.nanos = end.nanos - start.nanos; + + if (duration.seconds < 0 && duration.nanos > 0) { + duration.seconds += 1; + duration.nanos -= 1000000000; + } else if (duration.seconds > 0 && duration.nanos < 0) { + duration.seconds -= 1; + duration.nanos += 1000000000; + } + +Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + + Timestamp start = ...; + Duration duration = ...; + Timestamp end = ...; + + end.seconds = start.seconds + duration.seconds; + end.nanos = start.nanos + duration.nanos; + + if (end.nanos < 0) { + end.seconds -= 1; + end.nanos += 1000000000; + } else if (end.nanos >= 1000000000) { + end.seconds += 1; + end.nanos -= 1000000000; + } + +Example 3: Compute Duration from datetime.timedelta in Python. + + td = datetime.timedelta(days=3, minutes=10) + duration = Duration() + duration.FromTimedelta(td) + +# JSON Mapping + +In JSON format, the Duration type is encoded as a string rather than an +object, where the string ends in the suffix "s" (indicating seconds) and +is preceded by the number of seconds, with nanoseconds expressed as +fractional seconds. For example, 3 seconds with 0 nanoseconds should be +encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should +be expressed in JSON format as "3.000000001s", and 3 seconds and 1 +microsecond should be expressed in JSON format as "3.000001s". + + + +.. csv-table:: Duration type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years" + "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. 
Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_google/protobuf/struct.proto: + +google/protobuf/struct.proto +================================================================== + + + + + +.. _ref_google.protobuf.ListValue: + +ListValue +------------------------------------------------------------------ + +`ListValue` is a wrapper around a repeated field of values. + +The JSON representation for `ListValue` is JSON array. + + + +.. csv-table:: ListValue type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "values", ":ref:`ref_google.protobuf.Value`", "repeated", "Repeated field of dynamically typed values." + + + + + + + +.. _ref_google.protobuf.Struct: + +Struct +------------------------------------------------------------------ + +`Struct` represents a structured data value, consisting of fields +which map to dynamically typed values. In some languages, `Struct` +might be supported by a native representation. For example, in +scripting languages like JS a struct is represented as an +object. The details of that representation are described together +with the proto support for the language. + +The JSON representation for `Struct` is JSON object. + + + +.. csv-table:: Struct type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "fields", ":ref:`ref_google.protobuf.Struct.FieldsEntry`", "repeated", "Unordered map of dynamically typed values." + + + + + + + +.. _ref_google.protobuf.Struct.FieldsEntry: + +Struct.FieldsEntry +------------------------------------------------------------------ + + + + + +.. 
csv-table:: Struct.FieldsEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_google.protobuf.Value`", "", "" + + + + + + + +.. _ref_google.protobuf.Value: + +Value +------------------------------------------------------------------ + +`Value` represents a dynamically typed value which can be either +null, a number, a string, a boolean, a recursive struct value, or a +list of values. A producer of value is expected to set one of these +variants. Absence of any variant indicates an error. + +The JSON representation for `Value` is JSON value. + + + +.. csv-table:: Value type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "null_value", ":ref:`ref_google.protobuf.NullValue`", "", "Represents a null value." + "number_value", ":ref:`ref_double`", "", "Represents a double value." + "string_value", ":ref:`ref_string`", "", "Represents a string value." + "bool_value", ":ref:`ref_bool`", "", "Represents a boolean value." + "struct_value", ":ref:`ref_google.protobuf.Struct`", "", "Represents a structured value." + "list_value", ":ref:`ref_google.protobuf.ListValue`", "", "Represents a repeated `Value`." + + + + + + +.. + end messages + + + +.. _ref_google.protobuf.NullValue: + +NullValue +------------------------------------------------------------------ + +`NullValue` is a singleton enumeration to represent the null value for the +`Value` type union. + + The JSON representation for `NullValue` is JSON `null`. + +.. csv-table:: Enum NullValue values + :header: "Name", "Number", "Description" + :widths: auto + + "NULL_VALUE", "0", "Null value." + + +.. + end enums + + +.. + end HasExtensions + + +.. 
+ end services + + diff --git a/docs/api/flyteidl/docs/event/index.rst b/docs/api/flyteidl/docs/event/index.rst new file mode 100644 index 0000000000..b2c8abe50c --- /dev/null +++ b/docs/api/flyteidl/docs/event/index.rst @@ -0,0 +1,27 @@ + +############################################## +Flyte Internal and External Eventing interface +############################################## + +This section contains all the protocol buffer definitions for Internal and +External Eventing system. + +Flyte Internal Eventing +======================== + +This is the interface used by the dataplane (execution engine) to communicate with the control plane admin service about the workflow and task progress. + +Flyte External Eventing - Event Egress +======================================= + +This refers to the interface for all the event messages leaving the Flyte +**control plane** and reaching on the configured pubsub channel. + +`Event raw proto `__ + +.. toctree:: + :maxdepth: 1 + :caption: event + :name: eventtoc + + event diff --git a/docs/api/flyteidl/docs/plugins/index.rst b/docs/api/flyteidl/docs/plugins/index.rst new file mode 100644 index 0000000000..90924ae451 --- /dev/null +++ b/docs/api/flyteidl/docs/plugins/index.rst @@ -0,0 +1,14 @@ +Flyte Task Plugins +================== + +These protocol buffer specifications provide information about the various Task +Plugins available in the Flyte system. + +`Plugins raw protos `__ + +.. toctree:: + :maxdepth: 1 + :caption: plugins + :name: pluginstoc + + plugins diff --git a/docs/api/flyteidl/docs/plugins/plugins.rst b/docs/api/flyteidl/docs/plugins/plugins.rst new file mode 100644 index 0000000000..995dc7c084 --- /dev/null +++ b/docs/api/flyteidl/docs/plugins/plugins.rst @@ -0,0 +1,780 @@ +###################### +Protocol Documentation +###################### + + + + +.. _ref_flyteidl/plugins/array_job.proto: + +flyteidl/plugins/array_job.proto +================================================================== + + + + + +.. 
_ref_flyteidl.plugins.ArrayJob: + +ArrayJob +------------------------------------------------------------------ + +Describes a job that can process independent pieces of data concurrently. Multiple copies of the runnable component +will be executed concurrently. + + + +.. csv-table:: ArrayJob type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "parallelism", ":ref:`ref_int64`", "", "Defines the minimum number of instances to bring up concurrently at any given point. Note that this is an optimistic restriction and that, due to network partitioning or other failures, the actual number of currently running instances might be more. This has to be a positive number if assigned. Default value is size." + "size", ":ref:`ref_int64`", "", "Defines the number of instances to launch at most. This number should match the size of the input if the job requires processing of all input data. This has to be a positive number. In the case this is not defined, the back-end will determine the size at run-time by reading the inputs." + "min_successes", ":ref:`ref_int64`", "", "An absolute number of the minimum number of successful completions of subtasks. As soon as this criteria is met, the array job will be marked as successful and outputs will be computed. This has to be a non-negative number if assigned. Default value is size (if specified)." + "min_success_ratio", ":ref:`ref_float`", "", "If the array job size is not known beforehand, the min_success_ratio can instead be used to determine when an array job can be marked successful." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/plugins/dask.proto: + +flyteidl/plugins/dask.proto +================================================================== + + + + + +.. _ref_flyteidl.plugins.DaskCluster: + +DaskCluster +------------------------------------------------------------------ + + + + + +.. 
csv-table:: DaskCluster type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "image", ":ref:`ref_string`", "", "Optional image to use for the scheduler as well as the default worker group. If unset, will use the default image." + "nWorkers", ":ref:`ref_int32`", "", "Number of workers in the default worker group" + "resources", ":ref:`ref_flyteidl.core.Resources`", "", "Resources assigned to the scheduler as well as all pods of the default worker group. As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices it is advised to only set limits. If requests are not explicitly set, the plugin will make sure to set requests==limits. The plugin sets ` --memory-limit` as well as `--nthreads` for the workers according to the limit." + + + + + + + +.. _ref_flyteidl.plugins.DaskJob: + +DaskJob +------------------------------------------------------------------ + +Custom Proto for Dask Plugin + + + +.. csv-table:: DaskJob type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "namespace", ":ref:`ref_string`", "", "Optional namespace to use for the dask pods. If none is given, the namespace of the Flyte task is used" + "jobPodSpec", ":ref:`ref_flyteidl.plugins.JobPodSpec`", "", "Spec for the job pod" + "cluster", ":ref:`ref_flyteidl.plugins.DaskCluster`", "", "Cluster" + + + + + + + +.. _ref_flyteidl.plugins.JobPodSpec: + +JobPodSpec +------------------------------------------------------------------ + +Specification for the job pod + + + +.. csv-table:: JobPodSpec type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "image", ":ref:`ref_string`", "", "Optional image to use. If unset, will use the default image." + "resources", ":ref:`ref_flyteidl.core.Resources`", "", "Resources assigned to the job pod." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. 
_ref_flyteidl/plugins/mpi.proto: + +flyteidl/plugins/mpi.proto +================================================================== + + + + + +.. _ref_flyteidl.plugins.DistributedMPITrainingTask: + +DistributedMPITrainingTask +------------------------------------------------------------------ + +MPI operator proposal https://github.com/kubeflow/community/blob/master/proposals/mpi-operator-proposal.md +Custom proto for plugin that enables distributed training using https://github.com/kubeflow/mpi-operator + + + +.. csv-table:: DistributedMPITrainingTask type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "num_workers", ":ref:`ref_int32`", "", "number of worker spawned in the cluster for this job" + "num_launcher_replicas", ":ref:`ref_int32`", "", "number of launcher replicas spawned in the cluster for this job The launcher pod invokes mpirun and communicates with worker pods through MPI." + "slots", ":ref:`ref_int32`", "", "number of slots per worker used in hostfile. The available slots (GPUs) in each pod." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/plugins/presto.proto: + +flyteidl/plugins/presto.proto +================================================================== + + + + + +.. _ref_flyteidl.plugins.PrestoQuery: + +PrestoQuery +------------------------------------------------------------------ + +This message works with the 'presto' task type in the SDK and is the object that will be in the 'custom' field +of a Presto task's TaskTemplate + + + +.. csv-table:: PrestoQuery type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "routing_group", ":ref:`ref_string`", "", "" + "catalog", ":ref:`ref_string`", "", "" + "schema", ":ref:`ref_string`", "", "" + "statement", ":ref:`ref_string`", "", "" + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. 
_ref_flyteidl/plugins/pytorch.proto: + +flyteidl/plugins/pytorch.proto +================================================================== + + + + + +.. _ref_flyteidl.plugins.DistributedPyTorchTrainingTask: + +DistributedPyTorchTrainingTask +------------------------------------------------------------------ + +Custom proto for plugin that enables distributed training using https://github.com/kubeflow/pytorch-operator + + + +.. csv-table:: DistributedPyTorchTrainingTask type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "workers", ":ref:`ref_int32`", "", "number of worker replicas spawned in the cluster for this job" + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/plugins/qubole.proto: + +flyteidl/plugins/qubole.proto +================================================================== + + + + + +.. _ref_flyteidl.plugins.HiveQuery: + +HiveQuery +------------------------------------------------------------------ + +Defines a query to execute on a hive cluster. + + + +.. csv-table:: HiveQuery type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "query", ":ref:`ref_string`", "", "" + "timeout_sec", ":ref:`ref_uint32`", "", "" + "retryCount", ":ref:`ref_uint32`", "", "" + + + + + + + +.. _ref_flyteidl.plugins.HiveQueryCollection: + +HiveQueryCollection +------------------------------------------------------------------ + +Defines a collection of hive queries. + + + +.. csv-table:: HiveQueryCollection type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "queries", ":ref:`ref_flyteidl.plugins.HiveQuery`", "repeated", "" + + + + + + + +.. 
_ref_flyteidl.plugins.QuboleHiveJob: + +QuboleHiveJob +------------------------------------------------------------------ + +This message works with the 'hive' task type in the SDK and is the object that will be in the 'custom' field +of a hive task's TaskTemplate + + + +.. csv-table:: QuboleHiveJob type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "cluster_label", ":ref:`ref_string`", "", "" + "query_collection", ":ref:`ref_flyteidl.plugins.HiveQueryCollection`", "", "**Deprecated.** " + "tags", ":ref:`ref_string`", "repeated", "" + "query", ":ref:`ref_flyteidl.plugins.HiveQuery`", "", "" + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/plugins/ray.proto: + +flyteidl/plugins/ray.proto +================================================================== + + + + + +.. _ref_flyteidl.plugins.HeadGroupSpec: + +HeadGroupSpec +------------------------------------------------------------------ + +HeadGroupSpec are the spec for the head pod + + + +.. csv-table:: HeadGroupSpec type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "ray_start_params", ":ref:`ref_flyteidl.plugins.HeadGroupSpec.RayStartParamsEntry`", "repeated", "Optional. RayStartParams are the params of the start command: address, object-store-memory. Refer to https://docs.ray.io/en/latest/ray-core/package-ref.html#ray-start" + + + + + + + +.. _ref_flyteidl.plugins.HeadGroupSpec.RayStartParamsEntry: + +HeadGroupSpec.RayStartParamsEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: HeadGroupSpec.RayStartParamsEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + + +.. 
_ref_flyteidl.plugins.RayCluster: + +RayCluster +------------------------------------------------------------------ + +RayCluster defines the desired state of RayCluster + + + +.. csv-table:: RayCluster type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "head_group_spec", ":ref:`ref_flyteidl.plugins.HeadGroupSpec`", "", "HeadGroupSpecs are the spec for the head pod" + "worker_group_spec", ":ref:`ref_flyteidl.plugins.WorkerGroupSpec`", "repeated", "WorkerGroupSpecs are the specs for the worker pods" + + + + + + + +.. _ref_flyteidl.plugins.RayJob: + +RayJob +------------------------------------------------------------------ + +RayJobSpec defines the desired state of RayJob + + + +.. csv-table:: RayJob type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "ray_cluster", ":ref:`ref_flyteidl.plugins.RayCluster`", "", "RayClusterSpec is the cluster template to run the job" + "runtime_env", ":ref:`ref_string`", "", "runtime_env is base64 encoded. Ray runtime environments: https://docs.ray.io/en/latest/ray-core/handling-dependencies.html#runtime-environments" + + + + + + + +.. _ref_flyteidl.plugins.WorkerGroupSpec: + +WorkerGroupSpec +------------------------------------------------------------------ + +WorkerGroupSpec are the specs for the worker pods + + + +.. csv-table:: WorkerGroupSpec type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "group_name", ":ref:`ref_string`", "", "Required. RayCluster can have multiple worker groups, and it distinguishes them by name" + "replicas", ":ref:`ref_int32`", "", "Required. Desired replicas of the worker group. Defaults to 1." + "min_replicas", ":ref:`ref_int32`", "", "Optional. Min replicas of the worker group. MinReplicas defaults to 1." + "max_replicas", ":ref:`ref_int32`", "", "Optional. Max replicas of the worker group. 
MaxReplicas defaults to maxInt32" + "ray_start_params", ":ref:`ref_flyteidl.plugins.WorkerGroupSpec.RayStartParamsEntry`", "repeated", "Optional. RayStartParams are the params of the start command: address, object-store-memory. Refer to https://docs.ray.io/en/latest/ray-core/package-ref.html#ray-start" + + + + + + + +.. _ref_flyteidl.plugins.WorkerGroupSpec.RayStartParamsEntry: + +WorkerGroupSpec.RayStartParamsEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: WorkerGroupSpec.RayStartParamsEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/plugins/spark.proto: + +flyteidl/plugins/spark.proto +================================================================== + + + + + +.. _ref_flyteidl.plugins.SparkApplication: + +SparkApplication +------------------------------------------------------------------ + + + + + + + + + + +.. _ref_flyteidl.plugins.SparkJob: + +SparkJob +------------------------------------------------------------------ + +Custom Proto for Spark Plugin. + + + +.. csv-table:: SparkJob type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "applicationType", ":ref:`ref_flyteidl.plugins.SparkApplication.Type`", "", "" + "mainApplicationFile", ":ref:`ref_string`", "", "" + "mainClass", ":ref:`ref_string`", "", "" + "sparkConf", ":ref:`ref_flyteidl.plugins.SparkJob.SparkConfEntry`", "repeated", "" + "hadoopConf", ":ref:`ref_flyteidl.plugins.SparkJob.HadoopConfEntry`", "repeated", "" + "executorPath", ":ref:`ref_string`", "", "Executor path for Python jobs." + "databricksConf", ":ref:`ref_string`", "", "databricksConf is base64 encoded string which stores databricks job configuration. Config structure can be found here. 
https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure The config is automatically encoded by flytekit, and decoded in the propeller." + + + + + + + +.. _ref_flyteidl.plugins.SparkJob.HadoopConfEntry: + +SparkJob.HadoopConfEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: SparkJob.HadoopConfEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + + +.. _ref_flyteidl.plugins.SparkJob.SparkConfEntry: + +SparkJob.SparkConfEntry +------------------------------------------------------------------ + + + + + +.. csv-table:: SparkJob.SparkConfEntry type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "key", ":ref:`ref_string`", "", "" + "value", ":ref:`ref_string`", "", "" + + + + + + +.. + end messages + + + +.. _ref_flyteidl.plugins.SparkApplication.Type: + +SparkApplication.Type +------------------------------------------------------------------ + + + +.. csv-table:: Enum SparkApplication.Type values + :header: "Name", "Number", "Description" + :widths: auto + + "PYTHON", "0", "" + "JAVA", "1", "" + "SCALA", "2", "" + "R", "3", "" + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/plugins/tensorflow.proto: + +flyteidl/plugins/tensorflow.proto +================================================================== + + + + + +.. _ref_flyteidl.plugins.DistributedTensorflowTrainingTask: + +DistributedTensorflowTrainingTask +------------------------------------------------------------------ + +Custom proto for plugin that enables distributed training using https://github.com/kubeflow/tf-operator + + + +.. 
csv-table:: DistributedTensorflowTrainingTask type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "workers", ":ref:`ref_int32`", "", "number of worker, ps, chief replicas spawned in the cluster for this job" + "ps_replicas", ":ref:`ref_int32`", "", "PS -> Parameter server" + "chief_replicas", ":ref:`ref_int32`", "", "" + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + + + +.. _ref_flyteidl/plugins/waitable.proto: + +flyteidl/plugins/waitable.proto +================================================================== + + + + + +.. _ref_flyteidl.plugins.Waitable: + +Waitable +------------------------------------------------------------------ + +Represents an Execution that was launched and could be waited on. + + + +.. csv-table:: Waitable type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "wf_exec_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "" + "phase", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "", "" + "workflow_id", ":ref:`ref_string`", "", "" + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + +.. + end services + + diff --git a/docs/api/flyteidl/docs/restructuredtext.tmpl b/docs/api/flyteidl/docs/restructuredtext.tmpl new file mode 100644 index 0000000000..a408a70db0 --- /dev/null +++ b/docs/api/flyteidl/docs/restructuredtext.tmpl @@ -0,0 +1,129 @@ +###################### +Protocol Documentation +###################### + +{{range .Files}} +{{$file_name := .Name}} + +.. _ref_{{.Name}}: + +{{.Name}} +================================================================== + +{{.Description}} + +{{range .Messages}} + +.. _ref_{{.FullName}}: + +{{.LongName}} +------------------------------------------------------------------ + +{{.Description}} + +{{if .HasFields}} + +.. 
csv-table:: {{.LongName}} type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto +{{range .Fields }} + "{{.Name}}", ":ref:`ref_{{.FullType}}`", "{{.Label}}", "{{if (index .Options "deprecated"|default false)}}**Deprecated.** {{end}}{{nobr .Description}}{{if .DefaultValue}} Default: {{.DefaultValue}}{{end}}" +{{- end}} +{{end}} + + +{{if .HasExtensions}} + +.. csv-table:: {{.LongName}} type extensions + :header: "Extension", "Type", "Base", "Number", "Description" + :widths: auto +{{range .Extensions }} + "{{.Name}}", "{{.LongType}}", "{{.ContainingLongType}}", "{{.Number}}", "{{nobr .Description}}{{if .DefaultValue}} Default: {{.DefaultValue}}{{end}}" +{{- end}} +{{end}} + +{{end}} +.. + end messages + +{{range .Enums}} + +.. _ref_{{.FullName}}: + +{{.LongName}} +------------------------------------------------------------------ + +{{.Description}} + +.. csv-table:: Enum {{.LongName}} values + :header: "Name", "Number", "Description" + :widths: auto +{{range .Values }} + "{{.Name}}", "{{.Number}}", "{{nobr .Description}}" +{{- end}} + +{{end}} +.. + end enums + +{{if .HasExtensions}} + +.. _ref_{{$file_name}}_extensions: + +File-level Extensions +-------------------------------------------------------------------------------- + +.. csv-table:: {{.Name}} file-level Extensions + :header: "Extension", "Type", "Base", "Number", "Description" + :widths: auto +{{range .Extensions}} + "{{.Name}}", "{{.LongType}}", "{{.ContainingLongType}}", "{{.Number}}", "{{nobr .Description}}{{if .DefaultValue}} Default: `{{.DefaultValue}}`{{end}}" +{{- end}} +{{end}} +.. + end HasExtensions + +{{range .Services}} + +.. _ref_{{.FullName}}: + +{{.Name}} +------------------------------------------------------------------ + +{{.Description}} + +.. 
csv-table:: {{.Name}} service methods + :header: "Method Name", "Request Type", "Response Type", "Description" + :widths: auto +{{range .Methods}} + "{{.Name}}", ":ref:`ref_{{.RequestFullType}}`{{if .RequestStreaming}} stream{{end}}", ":ref:`ref_{{.ResponseFullType}}`{{if .ResponseStreaming}} stream{{end}}", "{{nobr .Description}}" +{{- end}} +{{end}} +.. + end services + +{{end}} + +.. _ref_scala_types: + +Scalar Value Types +================== + +{{range .Scalars}} + +.. _ref_{{.ProtoType}}: + +{{.ProtoType}} +----------------------------- + +{{.Notes}} + +.. csv-table:: {{.ProtoType}} language representation + :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" + :widths: auto + + "{{.ProtoType}}", "{{.CppType}}", "{{.JavaType}}", "{{.PythonType}}", "{{.GoType}}", "{{.CSharp}}", "{{.PhpType}}", "{{.RubyType}}" + +{{end}} +.. + end scalars \ No newline at end of file diff --git a/docs/api/flyteidl/docs/service/index.rst b/docs/api/flyteidl/docs/service/index.rst new file mode 100644 index 0000000000..6c5bebe6b2 --- /dev/null +++ b/docs/api/flyteidl/docs/service/index.rst @@ -0,0 +1,13 @@ +REST and gRPC interface for the Flyte Admin Service +=================================================== + +This section provides all endpoint definitions that are implemented by the Admin service. + +`Admin service raw protos `__ + +.. toctree:: + :maxdepth: 1 + :caption: service + :name: servicetoc + + service diff --git a/docs/api/flyteidl/docs/service/service.rst b/docs/api/flyteidl/docs/service/service.rst new file mode 100644 index 0000000000..3ca8ff500c --- /dev/null +++ b/docs/api/flyteidl/docs/service/service.rst @@ -0,0 +1,543 @@ +###################### +Protocol Documentation +###################### + + + + +.. _ref_flyteidl/service/admin.proto: + +flyteidl/service/admin.proto +================================================================== + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + + +.. 
_ref_flyteidl.service.AdminService: + +AdminService +------------------------------------------------------------------ + +The following defines an RPC service that is also served over HTTP via grpc-gateway. +Standard response codes for both are defined here: https://github.com/grpc-ecosystem/grpc-gateway/blob/master/runtime/errors.go + +.. csv-table:: AdminService service methods + :header: "Method Name", "Request Type", "Response Type", "Description" + :widths: auto + + "CreateTask", ":ref:`ref_flyteidl.admin.TaskCreateRequest`", ":ref:`ref_flyteidl.admin.TaskCreateResponse`", "Create and upload a :ref:`ref_flyteidl.admin.Task` definition" + "GetTask", ":ref:`ref_flyteidl.admin.ObjectGetRequest`", ":ref:`ref_flyteidl.admin.Task`", "Fetch a :ref:`ref_flyteidl.admin.Task` definition." + "ListTaskIds", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierListRequest`", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierList`", "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects." + "ListTasks", ":ref:`ref_flyteidl.admin.ResourceListRequest`", ":ref:`ref_flyteidl.admin.TaskList`", "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions." + "CreateWorkflow", ":ref:`ref_flyteidl.admin.WorkflowCreateRequest`", ":ref:`ref_flyteidl.admin.WorkflowCreateResponse`", "Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition" + "GetWorkflow", ":ref:`ref_flyteidl.admin.ObjectGetRequest`", ":ref:`ref_flyteidl.admin.Workflow`", "Fetch a :ref:`ref_flyteidl.admin.Workflow` definition." + "ListWorkflowIds", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierListRequest`", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierList`", "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects." + "ListWorkflows", ":ref:`ref_flyteidl.admin.ResourceListRequest`", ":ref:`ref_flyteidl.admin.WorkflowList`", "Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions." 
+ "CreateLaunchPlan", ":ref:`ref_flyteidl.admin.LaunchPlanCreateRequest`", ":ref:`ref_flyteidl.admin.LaunchPlanCreateResponse`", "Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition" + "GetLaunchPlan", ":ref:`ref_flyteidl.admin.ObjectGetRequest`", ":ref:`ref_flyteidl.admin.LaunchPlan`", "Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition." + "GetActiveLaunchPlan", ":ref:`ref_flyteidl.admin.ActiveLaunchPlanRequest`", ":ref:`ref_flyteidl.admin.LaunchPlan`", "Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`." + "ListActiveLaunchPlans", ":ref:`ref_flyteidl.admin.ActiveLaunchPlanListRequest`", ":ref:`ref_flyteidl.admin.LaunchPlanList`", "List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`." + "ListLaunchPlanIds", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierListRequest`", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierList`", "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects." + "ListLaunchPlans", ":ref:`ref_flyteidl.admin.ResourceListRequest`", ":ref:`ref_flyteidl.admin.LaunchPlanList`", "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions." + "UpdateLaunchPlan", ":ref:`ref_flyteidl.admin.LaunchPlanUpdateRequest`", ":ref:`ref_flyteidl.admin.LaunchPlanUpdateResponse`", "Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`." + "CreateExecution", ":ref:`ref_flyteidl.admin.ExecutionCreateRequest`", ":ref:`ref_flyteidl.admin.ExecutionCreateResponse`", "Triggers the creation of a :ref:`ref_flyteidl.admin.Execution`" + "RelaunchExecution", ":ref:`ref_flyteidl.admin.ExecutionRelaunchRequest`", ":ref:`ref_flyteidl.admin.ExecutionCreateResponse`", "Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution`" + "RecoverExecution", ":ref:`ref_flyteidl.admin.ExecutionRecoverRequest`", ":ref:`ref_flyteidl.admin.ExecutionCreateResponse`", "Recreates a previously-run workflow execution that will only start executing from the last known failure point. 
In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details." + "GetExecution", ":ref:`ref_flyteidl.admin.WorkflowExecutionGetRequest`", ":ref:`ref_flyteidl.admin.Execution`", "Fetches a :ref:`ref_flyteidl.admin.Execution`." + "UpdateExecution", ":ref:`ref_flyteidl.admin.ExecutionUpdateRequest`", ":ref:`ref_flyteidl.admin.ExecutionUpdateResponse`", "Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`." + "GetExecutionData", ":ref:`ref_flyteidl.admin.WorkflowExecutionGetDataRequest`", ":ref:`ref_flyteidl.admin.WorkflowExecutionGetDataResponse`", "Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`." + "ListExecutions", ":ref:`ref_flyteidl.admin.ResourceListRequest`", ":ref:`ref_flyteidl.admin.ExecutionList`", "Fetch a list of :ref:`ref_flyteidl.admin.Execution`." + "TerminateExecution", ":ref:`ref_flyteidl.admin.ExecutionTerminateRequest`", ":ref:`ref_flyteidl.admin.ExecutionTerminateResponse`", "Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`." + "GetNodeExecution", ":ref:`ref_flyteidl.admin.NodeExecutionGetRequest`", ":ref:`ref_flyteidl.admin.NodeExecution`", "Fetches a :ref:`ref_flyteidl.admin.NodeExecution`." + "ListNodeExecutions", ":ref:`ref_flyteidl.admin.NodeExecutionListRequest`", ":ref:`ref_flyteidl.admin.NodeExecutionList`", "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`." 
+ "ListNodeExecutionsForTask", ":ref:`ref_flyteidl.admin.NodeExecutionForTaskListRequest`", ":ref:`ref_flyteidl.admin.NodeExecutionList`", "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`." + "GetNodeExecutionData", ":ref:`ref_flyteidl.admin.NodeExecutionGetDataRequest`", ":ref:`ref_flyteidl.admin.NodeExecutionGetDataResponse`", "Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`." + "RegisterProject", ":ref:`ref_flyteidl.admin.ProjectRegisterRequest`", ":ref:`ref_flyteidl.admin.ProjectRegisterResponse`", "Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment." + "UpdateProject", ":ref:`ref_flyteidl.admin.Project`", ":ref:`ref_flyteidl.admin.ProjectUpdateResponse`", "Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API." + "ListProjects", ":ref:`ref_flyteidl.admin.ProjectListRequest`", ":ref:`ref_flyteidl.admin.Projects`", "Fetches a list of :ref:`ref_flyteidl.admin.Project`" + "CreateWorkflowEvent", ":ref:`ref_flyteidl.admin.WorkflowExecutionEventRequest`", ":ref:`ref_flyteidl.admin.WorkflowExecutionEventResponse`", "Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred." + "CreateNodeEvent", ":ref:`ref_flyteidl.admin.NodeExecutionEventRequest`", ":ref:`ref_flyteidl.admin.NodeExecutionEventResponse`", "Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred." + "CreateTaskEvent", ":ref:`ref_flyteidl.admin.TaskExecutionEventRequest`", ":ref:`ref_flyteidl.admin.TaskExecutionEventResponse`", "Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred." + "GetTaskExecution", ":ref:`ref_flyteidl.admin.TaskExecutionGetRequest`", ":ref:`ref_flyteidl.admin.TaskExecution`", "Fetches a :ref:`ref_flyteidl.admin.TaskExecution`." 
+ "ListTaskExecutions", ":ref:`ref_flyteidl.admin.TaskExecutionListRequest`", ":ref:`ref_flyteidl.admin.TaskExecutionList`", "Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`." + "GetTaskExecutionData", ":ref:`ref_flyteidl.admin.TaskExecutionGetDataRequest`", ":ref:`ref_flyteidl.admin.TaskExecutionGetDataResponse`", "Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`." + "UpdateProjectDomainAttributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesUpdateRequest`", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesUpdateResponse`", "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain." + "GetProjectDomainAttributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesGetRequest`", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesGetResponse`", "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain." + "DeleteProjectDomainAttributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesDeleteRequest`", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesDeleteResponse`", "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain." + "UpdateProjectAttributes", ":ref:`ref_flyteidl.admin.ProjectAttributesUpdateRequest`", ":ref:`ref_flyteidl.admin.ProjectAttributesUpdateResponse`", "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level" + "GetProjectAttributes", ":ref:`ref_flyteidl.admin.ProjectAttributesGetRequest`", ":ref:`ref_flyteidl.admin.ProjectAttributesGetResponse`", "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain." + "DeleteProjectAttributes", ":ref:`ref_flyteidl.admin.ProjectAttributesDeleteRequest`", ":ref:`ref_flyteidl.admin.ProjectAttributesDeleteResponse`", "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain." 
+ "UpdateWorkflowAttributes", ":ref:`ref_flyteidl.admin.WorkflowAttributesUpdateRequest`", ":ref:`ref_flyteidl.admin.WorkflowAttributesUpdateResponse`", "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow." + "GetWorkflowAttributes", ":ref:`ref_flyteidl.admin.WorkflowAttributesGetRequest`", ":ref:`ref_flyteidl.admin.WorkflowAttributesGetResponse`", "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow." + "DeleteWorkflowAttributes", ":ref:`ref_flyteidl.admin.WorkflowAttributesDeleteRequest`", ":ref:`ref_flyteidl.admin.WorkflowAttributesDeleteResponse`", "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow." + "ListMatchableAttributes", ":ref:`ref_flyteidl.admin.ListMatchableAttributesRequest`", ":ref:`ref_flyteidl.admin.ListMatchableAttributesResponse`", "Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type." + "ListNamedEntities", ":ref:`ref_flyteidl.admin.NamedEntityListRequest`", ":ref:`ref_flyteidl.admin.NamedEntityList`", "Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects." + "GetNamedEntity", ":ref:`ref_flyteidl.admin.NamedEntityGetRequest`", ":ref:`ref_flyteidl.admin.NamedEntity`", "Returns a :ref:`ref_flyteidl.admin.NamedEntity` object." + "UpdateNamedEntity", ":ref:`ref_flyteidl.admin.NamedEntityUpdateRequest`", ":ref:`ref_flyteidl.admin.NamedEntityUpdateResponse`", "Updates a :ref:`ref_flyteidl.admin.NamedEntity` object." + "GetVersion", ":ref:`ref_flyteidl.admin.GetVersionRequest`", ":ref:`ref_flyteidl.admin.GetVersionResponse`", "" + "GetDescriptionEntity", ":ref:`ref_flyteidl.admin.ObjectGetRequest`", ":ref:`ref_flyteidl.admin.DescriptionEntity`", "Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object." 
+ "ListDescriptionEntities", ":ref:`ref_flyteidl.admin.DescriptionEntityListRequest`", ":ref:`ref_flyteidl.admin.DescriptionEntityList`", "Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions." + +.. + end services + + + + +.. _ref_flyteidl/service/auth.proto: + +flyteidl/service/auth.proto +================================================================== + + + + + +.. _ref_flyteidl.service.OAuth2MetadataRequest: + +OAuth2MetadataRequest +------------------------------------------------------------------ + + + + + + + + + + +.. _ref_flyteidl.service.OAuth2MetadataResponse: + +OAuth2MetadataResponse +------------------------------------------------------------------ + +OAuth2MetadataResponse defines an RFC-Compliant response for /.well-known/oauth-authorization-server metadata +as defined in https://tools.ietf.org/html/rfc8414 + + + +.. csv-table:: OAuth2MetadataResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "issuer", ":ref:`ref_string`", "", "Defines the issuer string in all JWT tokens this server issues. The issuer can be admin itself or an external issuer." + "authorization_endpoint", ":ref:`ref_string`", "", "URL of the authorization server's authorization endpoint [RFC6749]. This is REQUIRED unless no grant types are supported that use the authorization endpoint." + "token_endpoint", ":ref:`ref_string`", "", "URL of the authorization server's token endpoint [RFC6749]." + "response_types_supported", ":ref:`ref_string`", "repeated", "Array containing a list of the OAuth 2.0 response_type values that this authorization server supports." + "scopes_supported", ":ref:`ref_string`", "repeated", "JSON array containing a list of the OAuth 2.0 [RFC6749] scope values that this authorization server supports." + "token_endpoint_auth_methods_supported", ":ref:`ref_string`", "repeated", "JSON array containing a list of client authentication methods supported by this token endpoint." 
+ "jwks_uri", ":ref:`ref_string`", "", "URL of the authorization server's JWK Set [JWK] document. The referenced document contains the signing key(s) the client uses to validate signatures from the authorization server." + "code_challenge_methods_supported", ":ref:`ref_string`", "repeated", "JSON array containing a list of Proof Key for Code Exchange (PKCE) [RFC7636] code challenge methods supported by this authorization server." + "grant_types_supported", ":ref:`ref_string`", "repeated", "JSON array containing a list of the OAuth 2.0 grant type values that this authorization server supports." + "device_authorization_endpoint", ":ref:`ref_string`", "", "URL of the authorization server's device authorization endpoint, as defined in Section 3.1 of [RFC8628]" + + + + + + + +.. _ref_flyteidl.service.PublicClientAuthConfigRequest: + +PublicClientAuthConfigRequest +------------------------------------------------------------------ + + + + + + + + + + +.. _ref_flyteidl.service.PublicClientAuthConfigResponse: + +PublicClientAuthConfigResponse +------------------------------------------------------------------ + +FlyteClientResponse encapsulates public information that flyte clients (CLIs... etc.) can use to authenticate users. + + + +.. csv-table:: PublicClientAuthConfigResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "client_id", ":ref:`ref_string`", "", "client_id to use when initiating OAuth2 authorization requests." + "redirect_uri", ":ref:`ref_string`", "", "redirect uri to use when initiating OAuth2 authorization requests." + "scopes", ":ref:`ref_string`", "repeated", "scopes to request when initiating OAuth2 authorization requests." + "authorization_metadata_key", ":ref:`ref_string`", "", "Authorization Header to use when passing Access Tokens to the server. If not provided, the client should use the default http `Authorization` header." 
+ "service_http_endpoint", ":ref:`ref_string`", "", "ServiceHttpEndpoint points to the http endpoint for the backend. If empty, clients can assume the endpoint used to configure the gRPC connection can be used for the http one respecting the insecure flag to choose between SSL or no SSL connections." + "audience", ":ref:`ref_string`", "", "audience to use when initiating OAuth2 authorization requests." + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + + +.. _ref_flyteidl.service.AuthMetadataService: + +AuthMetadataService +------------------------------------------------------------------ + +The following defines an RPC service that is also served over HTTP via grpc-gateway. +Standard response codes for both are defined here: https://github.com/grpc-ecosystem/grpc-gateway/blob/master/runtime/errors.go +RPCs defined in this service must be anonymously accessible. + +.. csv-table:: AuthMetadataService service methods + :header: "Method Name", "Request Type", "Response Type", "Description" + :widths: auto + + "GetOAuth2Metadata", ":ref:`ref_flyteidl.service.OAuth2MetadataRequest`", ":ref:`ref_flyteidl.service.OAuth2MetadataResponse`", "Anonymously accessible. Retrieves local or external oauth authorization server metadata." + "GetPublicClientConfig", ":ref:`ref_flyteidl.service.PublicClientAuthConfigRequest`", ":ref:`ref_flyteidl.service.PublicClientAuthConfigResponse`", "Anonymously accessible. Retrieves the client information clients should use when initiating OAuth2 authorization requests." + +.. + end services + + + + +.. _ref_flyteidl/service/dataproxy.proto: + +flyteidl/service/dataproxy.proto +================================================================== + + + + + +.. _ref_flyteidl.service.CreateDownloadLinkRequest: + +CreateDownloadLinkRequest +------------------------------------------------------------------ + +CreateDownloadLinkRequest defines the request parameters to create a download link (signed url) + + + +.. 
csv-table:: CreateDownloadLinkRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "artifact_type", ":ref:`ref_flyteidl.service.ArtifactType`", "", "ArtifactType of the artifact requested." + "expires_in", ":ref:`ref_google.protobuf.Duration`", "", "ExpiresIn defines a requested expiration duration for the generated url. The request will be rejected if this exceeds the platform allowed max. +optional. The default value comes from a global config." + "node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "NodeId is the unique identifier for the node execution. For a task node, this will retrieve the output of the most recent attempt of the task." + + + + + + + +.. _ref_flyteidl.service.CreateDownloadLinkResponse: + +CreateDownloadLinkResponse +------------------------------------------------------------------ + +CreateDownloadLinkResponse defines the response for the generated links + + + +.. csv-table:: CreateDownloadLinkResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "signed_url", ":ref:`ref_string`", "repeated", "SignedUrl specifies the url to use to download content from (e.g. https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...)" + "expires_at", ":ref:`ref_google.protobuf.Timestamp`", "", "ExpiresAt defines when the signed URL will expire." + + + + + + + +.. _ref_flyteidl.service.CreateDownloadLocationRequest: + +CreateDownloadLocationRequest +------------------------------------------------------------------ + +CreateDownloadLocationRequest specifies the request for the CreateDownloadLocation API. + + + +.. csv-table:: CreateDownloadLocationRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "native_url", ":ref:`ref_string`", "", "NativeUrl specifies the url in the format of the configured storage provider (e.g. 
s3://my-bucket/randomstring/suffix.tar)" + "expires_in", ":ref:`ref_google.protobuf.Duration`", "", "ExpiresIn defines a requested expiration duration for the generated url. The request will be rejected if this exceeds the platform allowed max. +optional. The default value comes from a global config." + + + + + + + +.. _ref_flyteidl.service.CreateDownloadLocationResponse: + +CreateDownloadLocationResponse +------------------------------------------------------------------ + + + + + +.. csv-table:: CreateDownloadLocationResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "signed_url", ":ref:`ref_string`", "", "SignedUrl specifies the url to use to download content from (e.g. https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...)" + "expires_at", ":ref:`ref_google.protobuf.Timestamp`", "", "ExpiresAt defines when the signed URL will expire." + + + + + + + +.. _ref_flyteidl.service.CreateUploadLocationRequest: + +CreateUploadLocationRequest +------------------------------------------------------------------ + +CreateUploadLocationRequest specifies the request for the CreateUploadLocation API. + + + +.. csv-table:: CreateUploadLocationRequest type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "project", ":ref:`ref_string`", "", "Project to create the upload location for +required" + "domain", ":ref:`ref_string`", "", "Domain to create the upload location for. +required" + "filename", ":ref:`ref_string`", "", "Filename specifies a desired suffix for the generated location. E.g. `file.py` or `pre/fix/file.zip`. +optional. By default, the service will generate a consistent name based on the provided parameters." + "expires_in", ":ref:`ref_google.protobuf.Duration`", "", "ExpiresIn defines a requested expiration duration for the generated url. The request will be rejected if this exceeds the platform allowed max. +optional. The default value comes from a global config." 
+ "content_md5", ":ref:`ref_bytes`", "", "ContentMD5 restricts the upload location to the specific MD5 provided. The ContentMD5 will also appear in the generated path. +required" + + + + + + + +.. _ref_flyteidl.service.CreateUploadLocationResponse: + +CreateUploadLocationResponse +------------------------------------------------------------------ + + + + + +.. csv-table:: CreateUploadLocationResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "signed_url", ":ref:`ref_string`", "", "SignedUrl specifies the url to use to upload content to (e.g. https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...)" + "native_url", ":ref:`ref_string`", "", "NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar)" + "expires_at", ":ref:`ref_google.protobuf.Timestamp`", "", "ExpiresAt defines when the signed URL will expire." + + + + + + +.. + end messages + + + +.. _ref_flyteidl.service.ArtifactType: + +ArtifactType +------------------------------------------------------------------ + +ArtifactType + +.. csv-table:: Enum ArtifactType values + :header: "Name", "Number", "Description" + :widths: auto + + "ARTIFACT_TYPE_UNDEFINED", "0", "ARTIFACT_TYPE_UNDEFINED is the default, often invalid, value for the enum." + "ARTIFACT_TYPE_DECK", "1", "ARTIFACT_TYPE_DECK refers to the deck html file optionally generated after a task, a workflow or a launch plan finishes executing." + + +.. + end enums + + +.. + end HasExtensions + + + +.. _ref_flyteidl.service.DataProxyService: + +DataProxyService +------------------------------------------------------------------ + +DataProxyService defines an RPC Service that allows access to user-data in a controlled manner. + +.. 
csv-table:: DataProxyService service methods + :header: "Method Name", "Request Type", "Response Type", "Description" + :widths: auto + + "CreateUploadLocation", ":ref:`ref_flyteidl.service.CreateUploadLocationRequest`", ":ref:`ref_flyteidl.service.CreateUploadLocationResponse`", "CreateUploadLocation creates a signed url to upload artifacts to for a given project/domain." + "CreateDownloadLocation", ":ref:`ref_flyteidl.service.CreateDownloadLocationRequest`", ":ref:`ref_flyteidl.service.CreateDownloadLocationResponse`", "CreateDownloadLocation creates a signed url to download artifacts." + "CreateDownloadLink", ":ref:`ref_flyteidl.service.CreateDownloadLinkRequest`", ":ref:`ref_flyteidl.service.CreateDownloadLinkResponse`", "CreateDownloadLink creates a signed url to download artifacts." + +.. + end services + + + + +.. _ref_flyteidl/service/identity.proto: + +flyteidl/service/identity.proto +================================================================== + + + + + +.. _ref_flyteidl.service.UserInfoRequest: + +UserInfoRequest +------------------------------------------------------------------ + + + + + + + + + + +.. _ref_flyteidl.service.UserInfoResponse: + +UserInfoResponse +------------------------------------------------------------------ + +See the OpenID Connect spec at https://openid.net/specs/openid-connect-core-1_0.html#UserInfoResponse for more information. + + + +.. csv-table:: UserInfoResponse type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto + + "subject", ":ref:`ref_string`", "", "Locally unique and never reassigned identifier within the Issuer for the End-User, which is intended to be consumed by the Client." 
+ "name", ":ref:`ref_string`", "", "Full name" + "preferred_username", ":ref:`ref_string`", "", "Shorthand name by which the End-User wishes to be referred to" + "given_name", ":ref:`ref_string`", "", "Given name(s) or first name(s)" + "family_name", ":ref:`ref_string`", "", "Surname(s) or last name(s)" + "email", ":ref:`ref_string`", "", "Preferred e-mail address" + "picture", ":ref:`ref_string`", "", "Profile picture URL" + + + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + + +.. _ref_flyteidl.service.IdentityService: + +IdentityService +------------------------------------------------------------------ + +IdentityService defines an RPC Service that interacts with user/app identities. + +.. csv-table:: IdentityService service methods + :header: "Method Name", "Request Type", "Response Type", "Description" + :widths: auto + + "UserInfo", ":ref:`ref_flyteidl.service.UserInfoRequest`", ":ref:`ref_flyteidl.service.UserInfoResponse`", "Retrieves user information about the currently logged in user." + +.. + end services + + + + +.. _ref_flyteidl/service/signal.proto: + +flyteidl/service/signal.proto +================================================================== + + + + +.. + end messages + + +.. + end enums + + +.. + end HasExtensions + + + +.. _ref_flyteidl.service.SignalService: + +SignalService +------------------------------------------------------------------ + +SignalService defines an RPC Service that may create, update, and retrieve signal(s). + +.. csv-table:: SignalService service methods + :header: "Method Name", "Request Type", "Response Type", "Description" + :widths: auto + + "GetOrCreateSignal", ":ref:`ref_flyteidl.admin.SignalGetOrCreateRequest`", ":ref:`ref_flyteidl.admin.Signal`", "Fetches or creates a :ref:`ref_flyteidl.admin.Signal`." + "ListSignals", ":ref:`ref_flyteidl.admin.SignalListRequest`", ":ref:`ref_flyteidl.admin.SignalList`", "Fetch a list of :ref:`ref_flyteidl.admin.Signal` definitions." 
+ "SetSignal", ":ref:`ref_flyteidl.admin.SignalSetRequest`", ":ref:`ref_flyteidl.admin.SignalSetResponse`", "Sets the value on a :ref:`ref_flyteidl.admin.Signal` definition" + +.. + end services + + diff --git a/docs/api/flyteidl/docs/withoutscalar_restructuredtext.tmpl b/docs/api/flyteidl/docs/withoutscalar_restructuredtext.tmpl new file mode 100644 index 0000000000..9fef938d99 --- /dev/null +++ b/docs/api/flyteidl/docs/withoutscalar_restructuredtext.tmpl @@ -0,0 +1,105 @@ +###################### +Protocol Documentation +###################### + +{{range .Files}} +{{$file_name := .Name}} + +.. _ref_{{.Name}}: + +{{.Name}} +================================================================== + +{{.Description}} + +{{range .Messages}} + +.. _ref_{{.FullName}}: + +{{.LongName}} +------------------------------------------------------------------ + +{{.Description}} + +{{if .HasFields}} + +.. csv-table:: {{.LongName}} type fields + :header: "Field", "Type", "Label", "Description" + :widths: auto +{{range .Fields }} + "{{.Name}}", ":ref:`ref_{{.FullType}}`", "{{.Label}}", "{{if (index .Options "deprecated"|default false)}}**Deprecated.** {{end}}{{nobr .Description}}{{if .DefaultValue}} Default: {{.DefaultValue}}{{end}}" +{{- end}} +{{end}} + + +{{if .HasExtensions}} + +.. csv-table:: {{.LongName}} type extensions + :header: "Extension", "Type", "Base", "Number", "Description" + :widths: auto +{{range .Extensions }} + "{{.Name}}", "{{.LongType}}", "{{.ContainingLongType}}", "{{.Number}}", "{{nobr .Description}}{{if .DefaultValue}} Default: {{.DefaultValue}}{{end}}" +{{- end}} +{{end}} + +{{end}} +.. + end messages + +{{range .Enums}} + +.. _ref_{{.FullName}}: + +{{.LongName}} +------------------------------------------------------------------ + +{{.Description}} + +.. csv-table:: Enum {{.LongName}} values + :header: "Name", "Number", "Description" + :widths: auto +{{range .Values }} + "{{.Name}}", "{{.Number}}", "{{nobr .Description}}" +{{- end}} + +{{end}} +.. 
+ end enums + +{{if .HasExtensions}} + +.. _ref_{{$file_name}}_extensions: + +File-level Extensions +-------------------------------------------------------------------------------- + +.. csv-table:: {{.Name}} file-level Extensions + :header: "Extension", "Type", "Base", "Number", "Description" + :widths: auto +{{range .Extensions}} + "{{.Name}}", "{{.LongType}}", "{{.ContainingLongType}}", "{{.Number}}", "{{nobr .Description}}{{if .DefaultValue}} Default: `{{.DefaultValue}}`{{end}}" +{{- end}} +{{end}} +.. + end HasExtensions + +{{range .Services}} + +.. _ref_{{.FullName}}: + +{{.Name}} +------------------------------------------------------------------ + +{{.Description}} + +.. csv-table:: {{.Name}} service methods + :header: "Method Name", "Request Type", "Response Type", "Description" + :widths: auto +{{range .Methods}} + "{{.Name}}", ":ref:`ref_{{.RequestFullType}}`{{if .RequestStreaming}} stream{{end}}", ":ref:`ref_{{.ResponseFullType}}`{{if .ResponseStreaming}} stream{{end}}", "{{nobr .Description}}" +{{- end}} +{{end}} +.. + end services + +{{end}} diff --git a/flyteidl/protos/index.rst b/docs/api/flyteidl/docs_index.rst similarity index 96% rename from flyteidl/protos/index.rst rename to docs/api/flyteidl/docs_index.rst index 39544bcc2e..27383673de 100644 --- a/flyteidl/protos/index.rst +++ b/docs/api/flyteidl/docs_index.rst @@ -16,3 +16,4 @@ and documentation of all these entities. 
docs/event/index docs/plugins/index docs/service/index + docs/contributing diff --git a/docs/api/flyteidl/flyteidl/admin/agent.proto b/docs/api/flyteidl/flyteidl/admin/agent.proto new file mode 100644 index 0000000000..931c27785f --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/agent.proto @@ -0,0 +1,258 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/core/literals.proto"; +import "flyteidl/core/tasks.proto"; +import "flyteidl/core/workflow.proto"; +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/execution.proto"; +import "flyteidl/core/metrics.proto"; +import "flyteidl/core/security.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/struct.proto"; + +// The state of the execution is used to control its visibility in the UI/CLI. +enum State { + option deprecated = true; + RETRYABLE_FAILURE = 0; + PERMANENT_FAILURE = 1; + PENDING = 2; + RUNNING = 3; + SUCCEEDED = 4; +} + +// Represents a subset of runtime task execution metadata that are relevant to external plugins. +message TaskExecutionMetadata { + // ID of the task execution + + core.TaskExecutionIdentifier task_execution_id = 1; + // k8s namespace where the task is executed in + string namespace = 2; + // Labels attached to the task execution + map labels = 3; + // Annotations attached to the task execution + map annotations = 4; + // k8s service account associated with the task execution + string k8s_service_account = 5; + // Environment variables attached to the task execution + map environment_variables = 6; + // Represents the maximum number of attempts allowed for a task. + // If a task fails, it can be retried up to this maximum number of attempts. + int32 max_attempts = 7; + // Indicates whether the task execution can be interrupted. + // If set to true, the task can be stopped before completion. 
+ bool interruptible = 8; + // Specifies the threshold for failure count at which the interruptible property + // will take effect. If the number of consecutive task failures exceeds this threshold, + // interruptible behavior will be activated. + int32 interruptible_failure_threshold = 9; + // Overrides for specific properties of the task node. + // These overrides can be used to customize the behavior of the task node. + core.TaskNodeOverrides overrides = 10; + // Identity of user running this task execution + core.Identity identity = 11; +} + +// Represents a request structure to create task. +message CreateTaskRequest { + // The inputs required to start the execution. All required inputs must be + // included in this map. If not required and not provided, defaults apply. + // +optional + core.LiteralMap inputs = 1; + // Template of the task that encapsulates all the metadata of the task. + core.TaskTemplate template = 2; + // Prefix for where task output data will be written. (e.g. s3://my-bucket/randomstring) + string output_prefix = 3; + // subset of runtime task execution metadata. + TaskExecutionMetadata task_execution_metadata = 4; +} + +// Represents a create response structure. +message CreateTaskResponse { + // ResourceMeta is created by the agent. It could be a string (jobId) or a dict (more complex metadata). + bytes resource_meta = 1; +} + +message CreateRequestHeader { + // Template of the task that encapsulates all the metadata of the task. + core.TaskTemplate template = 1; + // Prefix for where task output data will be written. (e.g. s3://my-bucket/randomstring) + string output_prefix = 2; + // subset of runtime task execution metadata. + TaskExecutionMetadata task_execution_metadata = 3; + // MaxDatasetSizeBytes is the maximum size of the dataset that can be generated by the task. 
+ int64 max_dataset_size_bytes = 4; +} + + +message ExecuteTaskSyncRequest { + oneof part { + CreateRequestHeader header = 1; + core.LiteralMap inputs = 2; + } +} + +message ExecuteTaskSyncResponseHeader { + Resource resource = 1; +} + +message ExecuteTaskSyncResponse { + // Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata). + // Resource is for synchronous task execution. + oneof res { + ExecuteTaskSyncResponseHeader header = 1; + core.LiteralMap outputs = 2; + } +} + +// A message used to fetch a job resource from flyte agent server. +message GetTaskRequest { + // A predefined yet extensible Task type identifier. + string task_type = 1 [deprecated = true]; + // Metadata about the resource to be passed to the agent. + bytes resource_meta = 2; + // A predefined yet extensible Task type identifier. + TaskCategory task_category = 3; +} + +// Response to get an individual task resource. +message GetTaskResponse { + Resource resource = 1; +} + +message Resource { + // DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI. + State state = 1 [deprecated = true]; + // The outputs of the execution. It's typically used by sql task. Agent service will create a + // Structured dataset pointing to the query result table. + // +optional + core.LiteralMap outputs = 2; + // A descriptive message for the current state. e.g. waiting for cluster. + string message = 3; + // log information for the task execution. + repeated core.TaskLog log_links = 4; + // The phase of the execution is used to determine the phase of the plugin's execution. + core.TaskExecution.Phase phase = 5; + // Custom data specific to the agent. + google.protobuf.Struct custom_info = 6; +} + +// A message used to delete a task. +message DeleteTaskRequest { + // A predefined yet extensible Task type identifier. + string task_type = 1 [deprecated = true]; + // Metadata about the resource to be passed to the agent. 
+ bytes resource_meta = 2; + // A predefined yet extensible Task type identifier. + TaskCategory task_category = 3; +} + +// Response to delete a task. +message DeleteTaskResponse {} + +// A message containing the agent metadata. +message Agent { + // Name is the developer-assigned name of the agent. + string name = 1; + + // SupportedTaskTypes are the types of the tasks that the agent can handle. + repeated string supported_task_types = 2 [deprecated = true]; + + // IsSync indicates whether this agent is a sync agent. Sync agents are expected to return their + // results synchronously when called by propeller. Given that sync agents can affect the performance + // of the system, it's important to enforce strict timeout policies. + // An Async agent, on the other hand, is required to be able to identify jobs by an + // identifier and query for job statuses as jobs progress. + bool is_sync = 3; + + // Supported_task_categories are the categories of the tasks that the agent can handle. + repeated TaskCategory supported_task_categories = 4; +} + +message TaskCategory { + // The name of the task type. + string name = 1; + // The version of the task type. + int32 version = 2; +} + +// A request to get an agent. +message GetAgentRequest { + // The name of the agent. + string name = 1; +} + +// A response containing an agent. +message GetAgentResponse { + Agent agent = 1; +} + +// A request to list all agents. +message ListAgentsRequest {} + +// A response containing a list of agents. +message ListAgentsResponse { + repeated Agent agents = 1; +} + +// A request to get the metrics from a task execution. +message GetTaskMetricsRequest { + // A predefined yet extensible Task type identifier. + string task_type = 1 [deprecated = true]; + // Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata). + bytes resource_meta = 2; + // The metrics to query. If empty, will return a default set of metrics. + // e.g. 
EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG + repeated string queries = 3; + // Start timestamp, inclusive. + google.protobuf.Timestamp start_time = 4; + // End timestamp, inclusive.. + google.protobuf.Timestamp end_time = 5; + // Query resolution step width in duration format or float number of seconds. + google.protobuf.Duration step = 6; + // A predefined yet extensible Task type identifier. + TaskCategory task_category = 7; +} + +// A response containing a list of metrics for a task execution. +message GetTaskMetricsResponse { + // The execution metric results. + repeated core.ExecutionMetricResult results = 1; +} + +// A request to get the log from a task execution. +message GetTaskLogsRequest { + // A predefined yet extensible Task type identifier. + string task_type = 1 [deprecated = true]; + // Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata). + bytes resource_meta = 2; + // Number of lines to return. + uint64 lines = 3; + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + string token = 4; + // A predefined yet extensible Task type identifier. + TaskCategory task_category = 5; +} + +message GetTaskLogsResponseHeader { + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + string token = 1; +} + +message GetTaskLogsResponseBody { + // The execution log results. + repeated string results = 1; +} + +// A response containing the logs for a task execution. 
+message GetTaskLogsResponse { + oneof part { + GetTaskLogsResponseHeader header = 1; + GetTaskLogsResponseBody body = 2; + } +} diff --git a/docs/api/flyteidl/flyteidl/admin/cluster_assignment.proto b/docs/api/flyteidl/flyteidl/admin/cluster_assignment.proto new file mode 100644 index 0000000000..6a55798436 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/cluster_assignment.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + + +// Encapsulates specifications for routing an execution onto a specific cluster. +message ClusterAssignment { + reserved 1, 2; + string cluster_pool_name = 3; +} diff --git a/docs/api/flyteidl/flyteidl/admin/common.proto b/docs/api/flyteidl/flyteidl/admin/common.proto new file mode 100644 index 0000000000..6c04b0531a --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/common.proto @@ -0,0 +1,327 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/core/execution.proto"; +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/literals.proto"; +import "google/protobuf/timestamp.proto"; + +// Encapsulation of fields that identifies a Flyte resource. +// A Flyte resource can be a task, workflow or launch plan. +// A resource can internally have multiple versions and is uniquely identified +// by project, domain, and name. +message NamedEntityIdentifier { + // Name of the project the resource belongs to. + string project = 1; + // Name of the domain the resource belongs to. + // A domain can be considered as a subset within a specific project. + string domain = 2; + // User provided value for the resource. + // The combination of project + domain + name uniquely identifies the resource. + // +optional - in certain contexts - like 'List API', 'Launch plans' + string name = 3; + + // Optional, org key applied to the resource. 
+ string org = 4; +} + +// The status of the named entity is used to control its visibility in the UI. +enum NamedEntityState { + // By default, all named entities are considered active and under development. + NAMED_ENTITY_ACTIVE = 0; + + // Archived named entities are no longer visible in the UI. + NAMED_ENTITY_ARCHIVED = 1; + + // System generated entities that aren't explicitly created or managed by a user. + SYSTEM_GENERATED = 2; +} + +// Additional metadata around a named entity. +message NamedEntityMetadata { + // Common description across all versions of the entity + // +optional + string description = 1; + + // Shared state across all version of the entity + // At this point in time, only workflow entities can have their state archived. + NamedEntityState state = 2; +} + +// Encapsulates information common to a NamedEntity, a Flyte resource such as a task, +// workflow or launch plan. A NamedEntity is exclusively identified by its resource type +// and identifier. +message NamedEntity { + // Resource type of the named entity. One of Task, Workflow or LaunchPlan. + flyteidl.core.ResourceType resource_type = 1; + NamedEntityIdentifier id = 2; + + // Additional metadata around a named entity. + NamedEntityMetadata metadata = 3; +} + +// Specifies sort ordering in a list request. +message Sort { + enum Direction { + + // By default, fields are sorted in descending order. + DESCENDING = 0; + ASCENDING = 1; + } + // Indicates an attribute to sort the response values. + // +required + string key = 1; + + // Indicates the direction to apply sort key for response values. + // +optional + Direction direction = 2; +} + +// Represents a request structure to list NamedEntityIdentifiers. +message NamedEntityIdentifierListRequest { + // Name of the project that contains the identifiers. + // +required + string project = 1; + + // Name of the domain the identifiers belongs to within the project. 
+ // +required + string domain = 2; + + // Indicates the number of resources to be returned. + // +required + uint32 limit = 3; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. + // +optional + string token = 4; + + // Specifies how listed entities should be sorted in the response. + // +optional + Sort sort_by = 5; + + // Indicates a list of filters passed as string. + // +optional + string filters = 6; + + // Optional, org key applied to the resource. + string org = 7; +} + +// Represents a request structure to list NamedEntity objects +message NamedEntityListRequest { + // Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. + // +required + flyteidl.core.ResourceType resource_type = 1; + // Name of the project that contains the identifiers. + // +required + string project = 2; + // Name of the domain the identifiers belongs to within the project. + string domain = 3; + // Indicates the number of resources to be returned. + uint32 limit = 4; + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. + // +optional + string token = 5; + + // Specifies how listed entities should be sorted in the response. + // +optional + Sort sort_by = 6; + + // Indicates a list of filters passed as string. + // +optional + string filters = 7; + + // Optional, org key applied to the resource. + string org = 8; +} + +// Represents a list of NamedEntityIdentifiers. +message NamedEntityIdentifierList { + // A list of identifiers. + repeated NamedEntityIdentifier entities = 1; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + string token = 2; +} + +// Represents a list of NamedEntityIdentifiers. 
+message NamedEntityList { + // A list of NamedEntity objects + repeated NamedEntity entities = 1; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + string token = 2; +} + +// A request to retrieve the metadata associated with a NamedEntityIdentifier +message NamedEntityGetRequest { + // Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. + // +required + flyteidl.core.ResourceType resource_type = 1; + + // The identifier for the named entity for which to fetch metadata. + // +required + NamedEntityIdentifier id = 2; +} + +// Request to set the referenced named entity state to the configured value. +message NamedEntityUpdateRequest { + // Resource type of the metadata to update + // +required + flyteidl.core.ResourceType resource_type = 1; + + // Identifier of the metadata to update + // +required + NamedEntityIdentifier id = 2; + + // Metadata object to set as the new value + // +required + NamedEntityMetadata metadata = 3; +} + +// Purposefully empty, may be populated in the future. +message NamedEntityUpdateResponse { +} + +// Shared request structure to fetch a single resource. +// Resources include: Task, Workflow, LaunchPlan +message ObjectGetRequest { + // Indicates a unique version of resource. + // +required + core.Identifier id = 1; +} + +// Shared request structure to retrieve a list of resources. +// Resources include: Task, Workflow, LaunchPlan +message ResourceListRequest { + // id represents the unique identifier of the resource. + // +required + NamedEntityIdentifier id = 1; + + // Indicates the number of resources to be returned. + // +required + uint32 limit = 2; + + // In the case of multiple pages of results, this server-provided token can be used to fetch the next page + // in a query. + // +optional + string token = 3; + + // Indicates a list of filters passed as string. 
+ // More info on constructing filters : + // +optional + string filters = 4; + + // Sort ordering. + // +optional + Sort sort_by = 5; +} + +// Defines an email notification specification. +message EmailNotification { + // The list of email addresses recipients for this notification. + // +required + repeated string recipients_email = 1; +} + +// Defines a pager duty notification specification. +message PagerDutyNotification { + // Currently, PagerDuty notifications leverage email to trigger a notification. + // +required + repeated string recipients_email = 1; +} + +// Defines a slack notification specification. +message SlackNotification { + // Currently, Slack notifications leverage email to trigger a notification. + // +required + repeated string recipients_email = 1; +} + +// Represents a structure for notifications based on execution status. +// The notification content is configured within flyte admin but can be templatized. +// Future iterations could expose configuring notifications with custom content. +message Notification { + // A list of phases to which users can associate the notifications to. + // +required + repeated core.WorkflowExecution.Phase phases = 1; + + // The type of notification to trigger. + // +required + oneof type { + EmailNotification email = 2; + PagerDutyNotification pager_duty = 3; + SlackNotification slack = 4; + } + +} + +// Represents a string url and associated metadata used throughout the platform. +message UrlBlob { + option deprecated = true; + + // Actual url value. + string url = 1; + + // Represents the size of the file accessible at the above url. + int64 bytes = 2; +} + +// Label values to be applied to an execution resource. +// In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined +// to specify how to merge labels defined at registration and execution time. +message Labels { + // Map of custom labels to be applied to the execution resource. 
+ map values = 1; +} + +// Annotation values to be applied to an execution resource. +// In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined +// to specify how to merge annotations defined at registration and execution time. +message Annotations { + // Map of custom annotations to be applied to the execution resource. + map values = 1; +} + +// Environment variable values to be applied to an execution resource. +// In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined +// to specify how to merge environment variables defined at registration and execution time. +message Envs { + // Map of custom environment variables to be applied to the execution resource. + repeated flyteidl.core.KeyValuePair values = 1; +} + +// Defines permissions associated with executions created by this launch plan spec. +// Use either of these roles when they have permissions required by your workflow execution. +// Deprecated. +message AuthRole { + option deprecated = true; + + // Defines an optional iam role which will be used for tasks run in executions created with this launch plan. + string assumable_iam_role = 1; + + // Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan. + string kubernetes_service_account = 2; +} + + +// Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.). +// See https://github.com/flyteorg/flyte/issues/211 for more background information. +message RawOutputDataConfig { + // Prefix for where offloaded data from user workflows will be written + // e.g. s3://bucket/key or s3://bucket/ + string output_location_prefix = 1; +} + +// These URLs are returned as part of node and task execution data requests. 
+message FlyteURLs { + string inputs = 1; + string outputs = 2; + string deck = 3; +} diff --git a/docs/api/flyteidl/flyteidl/admin/description_entity.proto b/docs/api/flyteidl/flyteidl/admin/description_entity.proto new file mode 100644 index 0000000000..055ca0f4b6 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/description_entity.proto @@ -0,0 +1,95 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/core/identifier.proto"; +import "flyteidl/admin/common.proto"; + +// DescriptionEntity contains detailed description for the task/workflow. +// Documentation could provide insight into the algorithms, business use case, etc. +message DescriptionEntity { + // id represents the unique identifier of the description entity. + core.Identifier id = 1; + // One-liner overview of the entity. + string short_description = 2; + // Full user description with formatting preserved. + Description long_description = 3; + // Optional link to source code used to define this entity. + SourceCode source_code = 4; + // User-specified tags. These are arbitrary and can be used for searching + // filtering and discovering tasks. + repeated string tags = 5; +} + +// The format of the long description +enum DescriptionFormat { + DESCRIPTION_FORMAT_UNKNOWN = 0; + DESCRIPTION_FORMAT_MARKDOWN = 1; + DESCRIPTION_FORMAT_HTML = 2; + // python default documentation - comments is rst + DESCRIPTION_FORMAT_RST = 3; +} + +// Full user description with formatting preserved. This can be rendered +// by clients, such as the console or command line tools with in-tact +// formatting. 
+message Description { + oneof content { + // long description - no more than 4KB + string value = 1; + // if the description sizes exceed some threshold we can offload the entire + // description proto altogether to an external data store, like S3 rather than store inline in the db + string uri = 2; + } + + // Format of the long description + DescriptionFormat format = 3; + // Optional link to an icon for the entity + string icon_link = 4; +} + +// Link to source code used to define this entity +message SourceCode { + string link = 1; +} + +// Represents a list of DescriptionEntities returned from the admin. +// See :ref:`ref_flyteidl.admin.DescriptionEntity` for more details +message DescriptionEntityList { + // A list of DescriptionEntities returned based on the request. + repeated DescriptionEntity descriptionEntities = 1; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + string token = 2; +} + +// Represents a request structure to retrieve a list of DescriptionEntities. +// See :ref:`ref_flyteidl.admin.DescriptionEntity` for more details +message DescriptionEntityListRequest { + // Identifies the specific type of resource that this identifier corresponds to. + flyteidl.core.ResourceType resource_type = 1; + + // The identifier for the description entity. + // +required + NamedEntityIdentifier id = 2; + + // Indicates the number of resources to be returned. + // +required + uint32 limit = 3; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. + // +optional + string token = 4; + + // Indicates a list of filters passed as string. + // More info on constructing filters : + // +optional + string filters = 5; + + // Sort ordering for returned list. 
+ // +optional + Sort sort_by = 6; +} diff --git a/docs/api/flyteidl/flyteidl/admin/event.proto b/docs/api/flyteidl/flyteidl/admin/event.proto new file mode 100644 index 0000000000..c1eea1e045 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/event.proto @@ -0,0 +1,70 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/event/event.proto"; + +// Indicates that a sent event was not used to update execution state due to +// the referenced execution already being terminated (and therefore ineligible +// for further state transitions). +message EventErrorAlreadyInTerminalState { + // +required + string current_phase = 1; +} + +// Indicates an event was rejected because it came from a different cluster than +// is on record as running the execution. +message EventErrorIncompatibleCluster { + // The cluster which has been recorded as processing the execution. + // +required + string cluster = 1; +} + +// Indicates why a sent event was not used to update execution. +message EventFailureReason { + // +required + oneof reason { + EventErrorAlreadyInTerminalState already_in_terminal_state = 1; + EventErrorIncompatibleCluster incompatible_cluster = 2; + } +} + +// Request to send a notification that a workflow execution event has occurred. +message WorkflowExecutionEventRequest { + // Unique ID for this request that can be traced between services + string request_id = 1; + + // Details about the event that occurred. + event.WorkflowExecutionEvent event = 2; +} + +message WorkflowExecutionEventResponse { + // Purposefully empty, may be populated in the future. +} + +// Request to send a notification that a node execution event has occurred. +message NodeExecutionEventRequest { + // Unique ID for this request that can be traced between services + string request_id = 1; + + // Details about the event that occurred. 
+ event.NodeExecutionEvent event = 2; +} + +message NodeExecutionEventResponse { + // Purposefully empty, may be populated in the future. +} + +// Request to send a notification that a task execution event has occurred. +message TaskExecutionEventRequest { + // Unique ID for this request that can be traced between services + string request_id = 1; + + // Details about the event that occurred. + event.TaskExecutionEvent event = 2; +} + +message TaskExecutionEventResponse { + // Purposefully empty, may be populated in the future. +} diff --git a/docs/api/flyteidl/flyteidl/admin/execution.proto b/docs/api/flyteidl/flyteidl/admin/execution.proto new file mode 100644 index 0000000000..6197576bd9 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/execution.proto @@ -0,0 +1,428 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/admin/cluster_assignment.proto"; +import "flyteidl/admin/common.proto"; +import "flyteidl/core/literals.proto"; +import "flyteidl/core/execution.proto"; +import "flyteidl/core/execution_envs.proto"; +import "flyteidl/core/artifact_id.proto"; +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/metrics.proto"; +import "flyteidl/core/security.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; +import "flyteidl/admin/matchable_resource.proto"; + +// Request to launch an execution with the given project, domain and optionally-assigned name. +message ExecutionCreateRequest { + // Name of the project the execution belongs to. + // +required + string project = 1; + + // Name of the domain the execution belongs to. + // A domain can be considered as a subset within a specific project. + // +required + string domain = 2; + + // User provided value for the resource. + // If none is provided the system will generate a unique string. 
+ // +optional + string name = 3; + + // Additional fields necessary to launch the execution. + // +optional + ExecutionSpec spec = 4; + + // The inputs required to start the execution. All required inputs must be + // included in this map. If not required and not provided, defaults apply. + // +optional + core.LiteralMap inputs = 5; + + // Optional, org key applied to the resource. + string org = 6; +} + +// Request to relaunch the referenced execution. +message ExecutionRelaunchRequest { + // Identifier of the workflow execution to relaunch. + // +required + core.WorkflowExecutionIdentifier id = 1; + + // Deprecated field, do not use. + reserved 2; + + // User provided value for the relaunched execution. + // If none is provided the system will generate a unique string. + // +optional + string name = 3; + + // Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. + // If enabled, all calculations are performed even if cached results would be available, overwriting the stored + // data once execution finishes successfully. + bool overwrite_cache = 4; +} + +// Request to recover the referenced execution. +message ExecutionRecoverRequest { + // Identifier of the workflow execution to recover. + core.WorkflowExecutionIdentifier id = 1; + + // User provided value for the recovered execution. + // If none is provided the system will generate a unique string. + // +optional + string name = 2; + + // Additional metadata which will be used to overwrite any metadata in the reference execution when triggering a recovery execution. + ExecutionMetadata metadata = 3; +} + +// The unique identifier for a successfully created execution. +// If the name was *not* specified in the create request, this identifier will include a generated name. +message ExecutionCreateResponse { + core.WorkflowExecutionIdentifier id = 1; +} + +// A message used to fetch a single workflow execution entity. 
+// See :ref:`ref_flyteidl.admin.Execution` for more details +message WorkflowExecutionGetRequest { + // Uniquely identifies an individual workflow execution. + core.WorkflowExecutionIdentifier id = 1; +} + +// A workflow execution represents an instantiated workflow, including all inputs and additional +// metadata as well as computed results included state, outputs, and duration-based attributes. +// Used as a response object used in Get and List execution requests. +message Execution { + // Unique identifier of the workflow execution. + core.WorkflowExecutionIdentifier id = 1; + + // User-provided configuration and inputs for launching the execution. + ExecutionSpec spec = 2; + + // Execution results. + ExecutionClosure closure = 3; +} + +// Used as a response for request to list executions. +// See :ref:`ref_flyteidl.admin.Execution` for more details +message ExecutionList { + repeated Execution executions = 1; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + string token = 2; +} + +// Input/output data can represented by actual values or a link to where values are stored +message LiteralMapBlob { + oneof data { + // Data in LiteralMap format + core.LiteralMap values = 1 [deprecated = true]; + + // In the event that the map is too large, we return a uri to the data + string uri = 2; + } +} + +// Specifies metadata around an aborted workflow execution. +message AbortMetadata { + // In the case of a user-specified abort, this will pass along the user-supplied cause. + string cause = 1; + + // Identifies the entity (if any) responsible for terminating the execution + string principal = 2; +} + +// Encapsulates the results of the Execution +message ExecutionClosure { + // A result produced by a terminated execution. + // A pending (non-terminal) execution will not have any output result. 
+ oneof output_result { + // Output URI in the case of a successful execution. + // DEPRECATED. Use GetExecutionData to fetch output data instead. + LiteralMapBlob outputs = 1 [deprecated = true]; + + // Error information in the case of a failed execution. + core.ExecutionError error = 2; + + // In the case of a user-specified abort, this will pass along the user-supplied cause. + string abort_cause = 10 [deprecated = true]; + + // In the case of a user-specified abort, this will pass along the user and their supplied cause. + AbortMetadata abort_metadata = 12; + + // Raw output data produced by this execution. + // DEPRECATED. Use GetExecutionData to fetch output data instead. + core.LiteralMap output_data = 13 [deprecated = true]; + } + + // Inputs computed and passed for execution. + // computed_inputs depends on inputs in ExecutionSpec, fixed and default inputs in launch plan + core.LiteralMap computed_inputs = 3 [deprecated = true]; + + // Most recent recorded phase for the execution. + core.WorkflowExecution.Phase phase = 4; + + // Reported time at which the execution began running. + google.protobuf.Timestamp started_at = 5; + + // The amount of time the execution spent running. + google.protobuf.Duration duration = 6; + + // Reported time at which the execution was created. + google.protobuf.Timestamp created_at = 7; + + // Reported time at which the execution was last updated. + google.protobuf.Timestamp updated_at = 8; + + // The notification settings to use after merging the CreateExecutionRequest and the launch plan + // notification settings. An execution launched with notifications will always prefer that definition + // to notifications defined statically in a launch plan. + repeated Notification notifications = 9; + + // Identifies the workflow definition for this execution. 
+ core.Identifier workflow_id = 11; + + // Provides the details of the last state change + ExecutionStateChangeDetails state_change_details = 14; +} + +// Represents system, rather than user-facing, metadata about an execution. +message SystemMetadata { + + // Which execution cluster this execution ran on. + string execution_cluster = 1; + + // Which kubernetes namespace the execution ran under. + string namespace = 2; +} + +// Represents attributes about an execution which are not required to launch the execution but are useful to record. +// These attributes are assigned at launch time and do not change. +message ExecutionMetadata { + // The method by which this execution was launched. + enum ExecutionMode { + // The default execution mode, MANUAL implies that an execution was launched by an individual. + MANUAL = 0; + + // A schedule triggered this execution launch. + SCHEDULED = 1; + + // A system process was responsible for launching this execution rather than an individual. + SYSTEM = 2; + + // This execution was launched with identical inputs as a previous execution. + RELAUNCH = 3; + + // This execution was triggered by another execution. + CHILD_WORKFLOW = 4; + + // This execution was recovered from another execution. + RECOVERED = 5; + + // Execution was kicked off by the artifact trigger system + TRIGGER = 6; + } + ExecutionMode mode = 1; + + // Identifier of the entity that triggered this execution. + // For systems using back-end authentication any value set here will be discarded in favor of the + // authenticated user context. + string principal = 2; + + // Indicates the nestedness of this execution. + // If a user launches a workflow execution, the default nesting is 0. + // If this execution further launches a workflow (child workflow), the nesting level is incremented by 0 => 1 + // Generally, if workflow at nesting level k launches a workflow then the child workflow will have + // nesting = k + 1. 
+ uint32 nesting = 3; + + // For scheduled executions, the requested time for execution for this specific schedule invocation. + google.protobuf.Timestamp scheduled_at = 4; + + // Which subworkflow node (if any) launched this execution + core.NodeExecutionIdentifier parent_node_execution = 5; + + // Optional, a reference workflow execution related to this execution. + // In the case of a relaunch, this references the original workflow execution. + core.WorkflowExecutionIdentifier reference_execution = 16; + + // Optional, platform-specific metadata about the execution. + // In the future this may be gated behind an ACL or some sort of authorization. + SystemMetadata system_metadata = 17; + + // Save a list of the artifacts used in this execution for now. This is a list only rather than a mapping + // since we don't have a structure to handle nested ones anyways. + repeated core.ArtifactID artifact_ids = 18; +} + +message NotificationList { + repeated Notification notifications = 1; +} + +// An ExecutionSpec encompasses all data used to launch this execution. The Spec does not change over the lifetime +// of an execution as it progresses across phase changes. +message ExecutionSpec { + // Launch plan to be executed + core.Identifier launch_plan = 1; + + // Input values to be passed for the execution + core.LiteralMap inputs = 2 [deprecated = true]; + + // Metadata for the execution + ExecutionMetadata metadata = 3; + + // This field is deprecated. Do not use. + reserved 4; + + oneof notification_overrides { + // List of notifications based on Execution status transitions + // When this list is not empty it is used rather than any notifications defined in the referenced launch plan. + // When this list is empty, the notifications defined for the launch plan will be applied. + NotificationList notifications = 5; + + // This should be set to true if all notifications are intended to be disabled for this execution. 
+ bool disable_all = 6; + } + + // Labels to apply to the execution resource. + Labels labels = 7; + + // Annotations to apply to the execution resource. + Annotations annotations = 8; + + // Optional: security context override to apply this execution. + core.SecurityContext security_context = 10; + + // Optional: auth override to apply this execution. + AuthRole auth_role = 16 [deprecated = true]; + + // Indicates the runtime priority of the execution. + core.QualityOfService quality_of_service = 17; + + // Controls the maximum number of task nodes that can be run in parallel for the entire workflow. + // This is useful to achieve fairness. Note: MapTasks are regarded as one unit, + // and parallelism/concurrency of MapTasks is independent from this. + int32 max_parallelism = 18; + + // User setting to configure where to store offloaded data (i.e. Blobs, structured datasets, query data, etc.). + // This should be a prefix like s3://my-bucket/my-data + RawOutputDataConfig raw_output_data_config = 19; + + // Controls how to select an available cluster on which this execution should run. + ClusterAssignment cluster_assignment = 20; + + // Allows for the interruptible flag of a workflow to be overwritten for a single execution. + // Omitting this field uses the workflow's value as a default. + // As we need to distinguish between the field not being provided and its default value false, we have to use a wrapper + // around the bool field. + google.protobuf.BoolValue interruptible = 21; + + // Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. + // If enabled, all calculations are performed even if cached results would be available, overwriting the stored + // data once execution finishes successfully. + bool overwrite_cache = 22; + + // Environment variables to be set for the execution. + Envs envs = 23; + + // Tags to be set for the execution. 
+ repeated string tags = 24 [deprecated = true]; + + // Execution cluster label to be set for the execution. + ExecutionClusterLabel execution_cluster_label = 25; + + // Execution environment assignments to be set for the execution. + repeated core.ExecutionEnvAssignment execution_env_assignments = 26; +} + +// Request to terminate an in-progress execution. This action is irreversible. +// If an execution is already terminated, this request will simply be a no-op. +// This request will fail if it references a non-existent execution. +// If the request succeeds the phase "ABORTED" will be recorded for the termination +// with the optional cause added to the output_result. +message ExecutionTerminateRequest { + // Uniquely identifies the individual workflow execution to be terminated. + core.WorkflowExecutionIdentifier id = 1; + + // Optional reason for aborting. + string cause = 2; +} + +message ExecutionTerminateResponse { + // Purposefully empty, may be populated in the future. +} + +// Request structure to fetch inputs, output and other data produced by an execution. +// By default this data is not returned inline in :ref:`ref_flyteidl.admin.WorkflowExecutionGetRequest` +message WorkflowExecutionGetDataRequest { + // The identifier of the execution for which to fetch inputs and outputs. + core.WorkflowExecutionIdentifier id = 1; +} + +// Response structure for WorkflowExecutionGetDataRequest which contains inputs and outputs for an execution. +message WorkflowExecutionGetDataResponse { + // Signed url to fetch a core.LiteralMap of execution outputs. + // Deprecated: Please use full_outputs instead. + UrlBlob outputs = 1 [deprecated = true]; + + // Signed url to fetch a core.LiteralMap of execution inputs. + // Deprecated: Please use full_inputs instead. + UrlBlob inputs = 2 [deprecated = true]; + + // Full_inputs will only be populated if they are under a configured size threshold. 
+ core.LiteralMap full_inputs = 3; + + // Full_outputs will only be populated if they are under a configured size threshold. + core.LiteralMap full_outputs = 4; +} + +// The state of the execution is used to control its visibility in the UI/CLI. +enum ExecutionState { + // By default, all executions are considered active. + EXECUTION_ACTIVE = 0; + + // Archived executions are no longer visible in the UI. + EXECUTION_ARCHIVED = 1; +} + +message ExecutionUpdateRequest { + // Identifier of the execution to update + core.WorkflowExecutionIdentifier id = 1; + + // State to set as the new value active/archive + ExecutionState state = 2; +} + +message ExecutionStateChangeDetails { + // The state of the execution is used to control its visibility in the UI/CLI. + ExecutionState state = 1; + + // This timestamp represents when the state changed. + google.protobuf.Timestamp occurred_at = 2; + + // Identifies the entity (if any) responsible for causing the state change of the execution + string principal = 3; +} + +message ExecutionUpdateResponse {} + +// WorkflowExecutionGetMetricsRequest represents a request to retrieve metrics for the specified workflow execution. +message WorkflowExecutionGetMetricsRequest { + // id defines the workflow execution to query for. + core.WorkflowExecutionIdentifier id = 1; + + // depth defines the number of Flyte entity levels to traverse when breaking down execution details. + int32 depth = 2; +} + +// WorkflowExecutionGetMetricsResponse represents the response containing metrics for the specified workflow execution. +message WorkflowExecutionGetMetricsResponse { + // Span defines the top-level breakdown of the workflows execution. More precise information is nested in a + // hierarchical structure using Flyte entity references. 
+ core.Span span = 1; +} diff --git a/docs/api/flyteidl/flyteidl/admin/launch_plan.proto b/docs/api/flyteidl/flyteidl/admin/launch_plan.proto new file mode 100644 index 0000000000..4be8dedb91 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/launch_plan.proto @@ -0,0 +1,226 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/core/execution.proto"; +import "flyteidl/core/execution_envs.proto"; +import "flyteidl/core/literals.proto"; +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/interface.proto"; +import "flyteidl/core/security.proto"; +import "flyteidl/admin/schedule.proto"; +import "flyteidl/admin/common.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; + + +// Request to register a launch plan. The included LaunchPlanSpec may have a complete or incomplete set of inputs required +// to launch a workflow execution. By default all launch plans are registered in state INACTIVE. If you wish to +// set the state to ACTIVE, you must submit a LaunchPlanUpdateRequest, after you have successfully created a launch plan. +message LaunchPlanCreateRequest { + // Uniquely identifies a launch plan entity. + core.Identifier id = 1; + + // User-provided launch plan details, including reference workflow, inputs and other metadata. + LaunchPlanSpec spec = 2; +} + +message LaunchPlanCreateResponse { + // Purposefully empty, may be populated in the future. +} + +// By default any launch plan regardless of state can be used to launch a workflow execution. +// However, at most one version of a launch plan +// (e.g. a NamedEntityIdentifier set of shared project, domain and name values) can be +// active at a time in regards to *schedules*. That is, at most one schedule in a NamedEntityIdentifier +// group will be observed and trigger executions at a defined cadence. 
+enum LaunchPlanState { + INACTIVE = 0; + ACTIVE = 1; +} + +// A LaunchPlan provides the capability to templatize workflow executions. +// Launch plans simplify associating one or more schedules, inputs and notifications with your workflows. +// Launch plans can be shared and used to trigger executions with predefined inputs even when a workflow +// definition doesn't necessarily have a default value for said input. +message LaunchPlan { + // Uniquely identifies a launch plan entity. + core.Identifier id = 1; + + // User-provided launch plan details, including reference workflow, inputs and other metadata. + LaunchPlanSpec spec = 2; + + // Values computed by the flyte platform after launch plan registration. + LaunchPlanClosure closure = 3; +} + +// Response object for list launch plan requests. +// See :ref:`ref_flyteidl.admin.LaunchPlan` for more details +message LaunchPlanList { + repeated LaunchPlan launch_plans = 1; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + string token = 2; +} + +// Defines permissions associated with executions created by this launch plan spec. +// Use either of these roles when they have permissions required by your workflow execution. +// Deprecated. +message Auth { + option deprecated = true; + + // Defines an optional iam role which will be used for tasks run in executions created with this launch plan. + string assumable_iam_role = 1; + + // Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan. + string kubernetes_service_account = 2; +} + +// User-provided launch plan definition and configuration values. 
+message LaunchPlanSpec { + // Reference to the Workflow template that the launch plan references + core.Identifier workflow_id = 1; + + // Metadata for the Launch Plan + LaunchPlanMetadata entity_metadata = 2; + + // Input values to be passed for the execution. + // These can be overridden when an execution is created with this launch plan. + core.ParameterMap default_inputs = 3; + + // Fixed, non-overridable inputs for the Launch Plan. + // These can not be overridden when an execution is created with this launch plan. + core.LiteralMap fixed_inputs = 4; + + // String to indicate the role to use to execute the workflow underneath + string role = 5 [deprecated = true]; + + // Custom labels to be applied to the execution resource. + Labels labels = 6; + + // Custom annotations to be applied to the execution resource. + Annotations annotations = 7; + + // Indicates the permission associated with workflow executions triggered with this launch plan. + Auth auth = 8 [deprecated = true]; + + AuthRole auth_role = 9 [deprecated = true]; + + // Indicates security context for permissions triggered with this launch plan + core.SecurityContext security_context = 10; + + // Indicates the runtime priority of the execution. + core.QualityOfService quality_of_service = 16; + + // Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.). + RawOutputDataConfig raw_output_data_config = 17; + + // Controls the maximum number of tasknodes that can be run in parallel for the entire workflow. + // This is useful to achieve fairness. Note: MapTasks are regarded as one unit, + // and parallelism/concurrency of MapTasks is independent from this. + int32 max_parallelism = 18; + + // Allows for the interruptible flag of a workflow to be overwritten for a single execution. + // Omitting this field uses the workflow's value as a default. 
+ // As we need to distinguish between the field not being provided and its default value false, we have to use a wrapper + // around the bool field. + google.protobuf.BoolValue interruptible = 19; + + // Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. + // If enabled, all calculations are performed even if cached results would be available, overwriting the stored + // data once execution finishes successfully. + bool overwrite_cache = 20; + + // Environment variables to be set for the execution. + Envs envs = 21; + + // Execution environment assignments to be set for the execution. + repeated core.ExecutionEnvAssignment execution_env_assignments = 22; +} + +// Values computed by the flyte platform after launch plan registration. +// These include expected_inputs required to be present in a CreateExecutionRequest +// to launch the reference workflow as well timestamp values associated with the launch plan. +message LaunchPlanClosure { + // Indicate the Launch plan state. + LaunchPlanState state = 1; + + // Indicates the set of inputs expected when creating an execution with the Launch plan + core.ParameterMap expected_inputs = 2; + + // Indicates the set of outputs expected to be produced by creating an execution with the Launch plan + core.VariableMap expected_outputs = 3; + + // Time at which the launch plan was created. + google.protobuf.Timestamp created_at = 4; + + // Time at which the launch plan was last updated. + google.protobuf.Timestamp updated_at = 5; +} + +// Additional launch plan attributes included in the LaunchPlanSpec not strictly required to launch +// the reference workflow. 
+message LaunchPlanMetadata { + // Schedule to execute the Launch Plan + Schedule schedule = 1; + + // List of notifications based on Execution status transitions + repeated Notification notifications = 2; + + // Additional metadata for how to launch the launch plan + google.protobuf.Any launch_conditions = 3; +} + +// Request to set the referenced launch plan state to the configured value. +// See :ref:`ref_flyteidl.admin.LaunchPlan` for more details +message LaunchPlanUpdateRequest { + // Identifier of launch plan for which to change state. + // +required. + core.Identifier id = 1; + + // Desired state to apply to the launch plan. + // +required. + LaunchPlanState state = 2; +} + +// Purposefully empty, may be populated in the future. +message LaunchPlanUpdateResponse { +} + +// Represents a request struct for finding an active launch plan for a given NamedEntityIdentifier +// See :ref:`ref_flyteidl.admin.LaunchPlan` for more details +message ActiveLaunchPlanRequest { + // +required. + NamedEntityIdentifier id = 1; +} + +// Represents a request structure to list active launch plans within a project/domain and optional org. +// See :ref:`ref_flyteidl.admin.LaunchPlan` for more details +message ActiveLaunchPlanListRequest { + // Name of the project that contains the identifiers. + // +required. + string project = 1; + + // Name of the domain the identifiers belongs to within the project. + // +required. + string domain = 2; + + // Indicates the number of resources to be returned. + // +required. + uint32 limit = 3; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. + // +optional + string token = 4; + + // Sort ordering. + // +optional + Sort sort_by = 5; + + // Optional, org key applied to the resource. 
+ string org = 6; +} diff --git a/docs/api/flyteidl/flyteidl/admin/matchable_resource.proto b/docs/api/flyteidl/flyteidl/admin/matchable_resource.proto new file mode 100644 index 0000000000..812d75fe4b --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/matchable_resource.proto @@ -0,0 +1,194 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/admin/common.proto"; +import "flyteidl/admin/cluster_assignment.proto"; +import "flyteidl/core/execution.proto"; +import "flyteidl/core/execution_envs.proto"; +import "flyteidl/core/security.proto"; +import "google/protobuf/wrappers.proto"; + +// Defines a resource that can be configured by customizable Project-, ProjectDomain- or WorkflowAttributes +// based on matching tags. +enum MatchableResource { + // Applies to customizable task resource requests and limits. + TASK_RESOURCE = 0; + + // Applies to configuring templated kubernetes cluster resources. + CLUSTER_RESOURCE = 1; + + // Configures task and dynamic task execution queue assignment. + EXECUTION_QUEUE = 2; + + // Configures the K8s cluster label to be used for execution to be run + EXECUTION_CLUSTER_LABEL = 3; + + // Configures default quality of service when undefined in an execution spec. + QUALITY_OF_SERVICE_SPECIFICATION = 4; + + // Selects configurable plugin implementation behavior for a given task type. + PLUGIN_OVERRIDE = 5; + + // Adds defaults for customizable workflow-execution specifications and overrides. + WORKFLOW_EXECUTION_CONFIG = 6; + + // Controls how to select an available cluster on which this execution should run. + CLUSTER_ASSIGNMENT = 7; +} + +// Defines a set of overridable task resource attributes set during task registration. 
+message TaskResourceSpec { + string cpu = 1; + + string gpu = 2; + + string memory = 3; + + string storage = 4; + + string ephemeral_storage = 5; +} + +// Defines task resource defaults and limits that will be applied at task registration. +message TaskResourceAttributes { + TaskResourceSpec defaults = 1; + + TaskResourceSpec limits = 2; +} + +message ClusterResourceAttributes { + // Custom resource attributes which will be applied in cluster resource creation (e.g. quotas). + // Map keys are the *case-sensitive* names of variables in templatized resource files. + // Map values should be the custom values which get substituted during resource creation. + map<string, string> attributes = 1; +} + +message ExecutionQueueAttributes { + // Tags used for assigning execution queues for tasks defined within this project. + repeated string tags = 1; +} + +message ExecutionClusterLabel { + // Label value to determine where the execution will be run + string value = 1; +} + +// This MatchableAttribute configures selecting alternate plugin implementations for a given task type. +// In addition to an override implementation a selection of fallbacks can be provided or other modes +// for handling cases where the desired plugin override is not enabled in a given Flyte deployment. +message PluginOverride { + // A predefined yet extensible Task type identifier. + string task_type = 1; + + // A set of plugin ids which should handle tasks of this type instead of the default registered plugin. The list will be tried in order until a plugin is found with that id. + repeated string plugin_id = 2; + + enum MissingPluginBehavior { + // By default, if this plugin is not enabled for a Flyte deployment then execution will fail. + FAIL = 0; + + // Uses the system-configured default implementation. + USE_DEFAULT = 1; + } + + // Defines the behavior when no plugin from the plugin_id list is found. 
+ MissingPluginBehavior missing_plugin_behavior = 4; +} + + +message PluginOverrides { + repeated PluginOverride overrides = 1; +} + +// Adds defaults for customizable workflow-execution specifications and overrides. +message WorkflowExecutionConfig { + // Can be used to control the number of parallel nodes to run within the workflow. This is useful to achieve fairness. + int32 max_parallelism = 1; + + // Indicates security context permissions for executions triggered with this matchable attribute. + core.SecurityContext security_context = 2; + + // Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.). + RawOutputDataConfig raw_output_data_config = 3; + + // Custom labels to be applied to a triggered execution resource. + Labels labels = 4; + + // Custom annotations to be applied to a triggered execution resource. + Annotations annotations = 5; + + // Allows for the interruptible flag of a workflow to be overwritten for a single execution. + // Omitting this field uses the workflow's value as a default. + // As we need to distinguish between the field not being provided and its default value false, we have to use a wrapper + // around the bool field. + google.protobuf.BoolValue interruptible = 6; + + // Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. + // If enabled, all calculations are performed even if cached results would be available, overwriting the stored + // data once execution finishes successfully. + bool overwrite_cache = 7; + + // Environment variables to be set for the execution. + Envs envs = 8; + + // Execution environment assignments to be set for the execution. + repeated core.ExecutionEnvAssignment execution_env_assignments = 9; +} + +// Generic container for encapsulating all types of the above attributes messages. 
+message MatchingAttributes { + oneof target { + TaskResourceAttributes task_resource_attributes = 1; + + ClusterResourceAttributes cluster_resource_attributes = 2; + + ExecutionQueueAttributes execution_queue_attributes = 3; + + ExecutionClusterLabel execution_cluster_label = 4; + + core.QualityOfService quality_of_service = 5; + + PluginOverrides plugin_overrides = 6; + + WorkflowExecutionConfig workflow_execution_config = 7; + + ClusterAssignment cluster_assignment = 8; + } +} + +// Represents a custom set of attributes applied for either a domain (and optional org); a domain and project (and optional org); +// or domain, project and workflow name (and optional org). +// These are used to override system level defaults for kubernetes cluster resource management, +// default execution values, and more all across different levels of specificity. +message MatchableAttributesConfiguration { + MatchingAttributes attributes = 1; + + string domain = 2; + + string project = 3; + + string workflow = 4; + + string launch_plan = 5; + + // Optional, org key applied to the resource. + string org = 6; +} + +// Request all matching resource attributes for a resource type. +// See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details +message ListMatchableAttributesRequest { + // +required + MatchableResource resource_type = 1; + + // Optional, org filter applied to list project requests. + string org = 2; +} + +// Response for a request for all matching resource attributes for a resource type. 
+// See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details +message ListMatchableAttributesResponse { + repeated MatchableAttributesConfiguration configurations = 1; +} diff --git a/docs/api/flyteidl/flyteidl/admin/node_execution.proto b/docs/api/flyteidl/flyteidl/admin/node_execution.proto new file mode 100644 index 0000000000..411201ea45 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/node_execution.proto @@ -0,0 +1,245 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/admin/common.proto"; +import "flyteidl/core/execution.proto"; +import "flyteidl/core/catalog.proto"; +import "flyteidl/core/compiler.proto"; +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/literals.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/duration.proto"; + +// A message used to fetch a single node execution entity. +// See :ref:`ref_flyteidl.admin.NodeExecution` for more details +message NodeExecutionGetRequest { + + // Uniquely identifies an individual node execution. + // +required + core.NodeExecutionIdentifier id = 1; +} + +// Represents a request structure to retrieve a list of node execution entities. +// See :ref:`ref_flyteidl.admin.NodeExecution` for more details +message NodeExecutionListRequest { + // Indicates the workflow execution to filter by. + // +required + core.WorkflowExecutionIdentifier workflow_execution_id = 1; + + // Indicates the number of resources to be returned. + // +required + uint32 limit = 2; + + // In the case of multiple pages of results, the, server-provided token can be used to fetch the next page + // in a query. + // +optional + + string token = 3; + // Indicates a list of filters passed as string. + // More info on constructing filters : + // +optional + string filters = 4; + + // Sort ordering. 
+ // +optional + Sort sort_by = 5; + + // Unique identifier of the parent node in the execution + // +optional + string unique_parent_id = 6; +} + +// Represents a request structure to retrieve a list of node execution entities launched by a specific task. +// This can arise when a task yields a subworkflow. +message NodeExecutionForTaskListRequest { + // Indicates the node execution to filter by. + // +required + core.TaskExecutionIdentifier task_execution_id = 1; + + // Indicates the number of resources to be returned. + // +required + uint32 limit = 2; + + // In the case of multiple pages of results, the, server-provided token can be used to fetch the next page + // in a query. + // +optional + string token = 3; + + // Indicates a list of filters passed as string. + // More info on constructing filters : + // +optional + string filters = 4; + + // Sort ordering. + // +optional + Sort sort_by = 5; +} + +// Encapsulates all details for a single node execution entity. +// A node represents a component in the overall workflow graph. A node launch a task, multiple tasks, an entire nested +// sub-workflow, or even a separate child-workflow execution. +// The same task can be called repeatedly in a single workflow but each node is unique. +message NodeExecution { + + // Uniquely identifies an individual node execution. + core.NodeExecutionIdentifier id = 1; + + // Path to remote data store where input blob is stored. + string input_uri = 2; + + // Computed results associated with this node execution. + NodeExecutionClosure closure = 3; + + // Metadata for Node Execution + NodeExecutionMetaData metadata = 4; +} + +// Represents additional attributes related to a Node Execution +message NodeExecutionMetaData { + // Node executions are grouped depending on retries of the parent + // Retry group is unique within the context of a parent node. 
+ string retry_group = 1; + + // Boolean flag indicating if the node has child nodes under it + // This can be true when a node contains a dynamic workflow which then produces + // child nodes. + bool is_parent_node = 2; + + // Node id of the node in the original workflow + // This maps to value of WorkflowTemplate.nodes[X].id + string spec_node_id = 3; + + // Boolean flag indicating if the node contains a dynamic workflow which then produces child nodes. + // This is to distinguish between subworkflows and dynamic workflows which can both have is_parent_node as true. + bool is_dynamic = 4; + + // Boolean flag indicating if the node is an array node. This is intended to uniquely identify + // array nodes from other nodes which can have is_parent_node as true. + bool is_array = 5; +} + +// Request structure to retrieve a list of node execution entities. +// See :ref:`ref_flyteidl.admin.NodeExecution` for more details +message NodeExecutionList { + repeated NodeExecution node_executions = 1; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + string token = 2; +} + +// Container for node execution details and results. +message NodeExecutionClosure { + // Only a node in a terminal state will have a non-empty output_result. + oneof output_result { + // Links to a remotely stored, serialized core.LiteralMap of node execution outputs. + // DEPRECATED. Use GetNodeExecutionData to fetch output data instead. + string output_uri = 1 [deprecated = true]; + + // Error information for the Node + core.ExecutionError error = 2; + + // Raw output data produced by this node execution. + // DEPRECATED. Use GetNodeExecutionData to fetch output data instead. + core.LiteralMap output_data = 10 [deprecated = true]; + } + + // The last recorded phase for this node execution. 
+ core.NodeExecution.Phase phase = 3; + + // Time at which the node execution began running. + google.protobuf.Timestamp started_at = 4; + + // The amount of time the node execution spent running. + google.protobuf.Duration duration = 5; + + // Time at which the node execution was created. + google.protobuf.Timestamp created_at = 6; + + // Time at which the node execution was last updated. + google.protobuf.Timestamp updated_at = 7; + + // Store metadata for what the node launched. + // for ex: if this is a workflow node, we store information for the launched workflow. + oneof target_metadata { + WorkflowNodeMetadata workflow_node_metadata = 8; + TaskNodeMetadata task_node_metadata = 9; + } + + // String location uniquely identifying where the deck HTML file is. + // NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar) + string deck_uri = 11; + + // dynamic_job_spec_uri is the location of the DynamicJobSpec proto message for a DynamicWorkflow. This is required + // to correctly recover partially completed executions where the subworkflow has already been compiled. + string dynamic_job_spec_uri = 12; +} + +// Metadata for a WorkflowNode +message WorkflowNodeMetadata { + // The identifier for a workflow execution launched by a node. + core.WorkflowExecutionIdentifier executionId = 1; +} + +// Metadata for the case in which the node is a TaskNode +message TaskNodeMetadata { + // Captures the status of caching for this execution. + core.CatalogCacheStatus cache_status = 1; + // This structure carries the catalog artifact information + core.CatalogMetadata catalog_key = 2; + // The latest checkpoint location + string checkpoint_uri = 4; +} + +// For dynamic workflow nodes we capture information about the dynamic workflow definition that gets generated. +message DynamicWorkflowNodeMetadata { + // id represents the unique identifier of the workflow. 
+ core.Identifier id = 1; + + // Represents the compiled representation of the embedded dynamic workflow. + core.CompiledWorkflowClosure compiled_workflow = 2; + + // dynamic_job_spec_uri is the location of the DynamicJobSpec proto message for this DynamicWorkflow. This is + // required to correctly recover partially completed executions where the subworkflow has already been compiled. + string dynamic_job_spec_uri = 3; +} + +// Request structure to fetch inputs and output for a node execution. +// By default, these are not returned in :ref:`ref_flyteidl.admin.NodeExecutionGetRequest` +message NodeExecutionGetDataRequest { + // The identifier of the node execution for which to fetch inputs and outputs. + core.NodeExecutionIdentifier id = 1; +} + +// Response structure for NodeExecutionGetDataRequest which contains inputs and outputs for a node execution. +message NodeExecutionGetDataResponse { + // Signed url to fetch a core.LiteralMap of node execution inputs. + // Deprecated: Please use full_inputs instead. + UrlBlob inputs = 1 [deprecated = true]; + + // Signed url to fetch a core.LiteralMap of node execution outputs. + // Deprecated: Please use full_outputs instead. + UrlBlob outputs = 2 [deprecated = true]; + + // Full_inputs will only be populated if they are under a configured size threshold. + core.LiteralMap full_inputs = 3; + + // Full_outputs will only be populated if they are under a configured size threshold. + core.LiteralMap full_outputs = 4; + + // Optional Workflow closure for a dynamically generated workflow, in the case this node yields a dynamic workflow we return its structure here. 
+ DynamicWorkflowNodeMetadata dynamic_workflow = 16; + + FlyteURLs flyte_urls = 17; + +} + +message GetDynamicNodeWorkflowRequest { + core.NodeExecutionIdentifier id = 1; +} + +message DynamicNodeWorkflowResponse { + core.CompiledWorkflowClosure compiled_workflow = 1; +} diff --git a/docs/api/flyteidl/flyteidl/admin/notification.proto b/docs/api/flyteidl/flyteidl/admin/notification.proto new file mode 100644 index 0000000000..9ef54c9794 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/notification.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; + +package flyteidl.admin; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +// Represents the Email object that is sent to a publisher/subscriber +// to forward the notification. +// Note: This is internal to Admin and doesn't need to be exposed to other components. +message EmailMessage { + // The list of email addresses to receive an email with the content populated in the other fields. + // Currently, each email recipient will receive its own email. + // This populates the TO field. + repeated string recipients_email = 1; + + // The email of the sender. + // This populates the FROM field. + string sender_email = 2; + + // The content of the subject line. + // This populates the SUBJECT field. + string subject_line = 3; + + // The content of the email body. + // This populates the BODY field. + string body = 4; +} diff --git a/docs/api/flyteidl/flyteidl/admin/project.proto b/docs/api/flyteidl/flyteidl/admin/project.proto new file mode 100644 index 0000000000..8b994b7267 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/project.proto @@ -0,0 +1,132 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + + +import "flyteidl/admin/common.proto"; + +// Empty request for GetDomain +message GetDomainRequest {} + +// Namespace within a project commonly used to differentiate between different service instances. 
+// e.g. "production", "development", etc. +message Domain { + // Globally unique domain name. + string id = 1; + + // Display name. + string name = 2; +} + +// Represents a list of domains. +message GetDomainsResponse { + repeated Domain domains = 1; +} + +// Top-level namespace used to classify different entities like workflows and executions. +message Project { + // Globally unique project name. + string id = 1; + + // Display name. + string name = 2; + + repeated Domain domains = 3; + + string description = 4; + + // Leverage Labels from flyteidl.admin.common.proto to + // tag projects with ownership information. + Labels labels = 5; + + // The state of the project is used to control its visibility in the UI and validity. + enum ProjectState { + // By default, all projects are considered active. + ACTIVE = 0; + + // Archived projects are no longer visible in the UI and no longer valid. + ARCHIVED = 1; + + // System generated projects that aren't explicitly created or managed by a user. + SYSTEM_GENERATED = 2; + + // System archived projects that aren't explicitly archived by a user. + SYSTEM_ARCHIVED = 3; + } + ProjectState state = 6; + + // Optional, org key applied to the resource. + string org = 7; +} + +// Represents a list of projects. +// See :ref:`ref_flyteidl.admin.Project` for more details +message Projects { + repeated Project projects = 1; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + string token = 2; +} + +// Request to retrieve a list of projects matching specified filters. +// See :ref:`ref_flyteidl.admin.Project` for more details +message ProjectListRequest { + // Indicates the number of projects to be returned. + // +required + uint32 limit = 1; + + // In the case of multiple pages of results, this server-provided token can be used to fetch the next page + // in a query. 
+ // +optional + string token = 2; + + // Indicates a list of filters passed as string. + // More info on constructing filters : + // +optional + string filters = 3; + + // Sort ordering. + // +optional + Sort sort_by = 4; + + // Optional, org filter applied to list project requests. + string org = 5; +} + +// Adds a new user-project within the Flyte deployment. +// See :ref:`ref_flyteidl.admin.Project` for more details +message ProjectRegisterRequest { + // +required + Project project = 1; +} + +// Purposefully empty, may be updated in the future. +message ProjectRegisterResponse { +} + +// Purposefully empty, may be updated in the future. +message ProjectUpdateResponse { +} + +message ProjectGetRequest { + // Indicates a unique project. + // +required + string id = 1; + + // Optional, org key applied to the resource. + string org = 2; +} + + +// Error returned for inactive projects +message InactiveProject { + // Indicates a unique project. + // +required + string id = 1; + + // Optional, org key applied to the resource. + string org = 2; +} + diff --git a/docs/api/flyteidl/flyteidl/admin/project_attributes.proto b/docs/api/flyteidl/flyteidl/admin/project_attributes.proto new file mode 100644 index 0000000000..2656ab25f5 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/project_attributes.proto @@ -0,0 +1,69 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/admin/matchable_resource.proto"; + +// Defines a set of custom matching attributes at the project level. +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message ProjectAttributes { + // Unique project id for which this set of attributes will be applied. + string project = 1; + + MatchingAttributes matching_attributes = 2; + + // Optional, org key applied to the project. 
+ string org = 3; +} + +// Sets custom attributes for a project +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message ProjectAttributesUpdateRequest { + // +required + ProjectAttributes attributes = 1; +} + +// Purposefully empty, may be populated in the future. +message ProjectAttributesUpdateResponse { +} + +// Request to get an individual project level attribute override. +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message ProjectAttributesGetRequest { + // Unique project id which this set of attributes references. + // +required + string project = 1; + + // Which type of matchable attributes to return. + // +required + MatchableResource resource_type = 2; + + // Optional, org key applied to the project. + string org = 3; +} + +// Response to get an individual project level attribute override. +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message ProjectAttributesGetResponse { + ProjectAttributes attributes = 1; +} + +// Request to delete a set matchable project level attribute override. +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message ProjectAttributesDeleteRequest { + // Unique project id which this set of attributes references. + // +required + string project = 1; + + // Which type of matchable attributes to delete. + // +required + MatchableResource resource_type = 2; + + // Optional, org key applied to the project. + string org = 3; +} + +// Purposefully empty, may be populated in the future. 
+message ProjectAttributesDeleteResponse { +} diff --git a/docs/api/flyteidl/flyteidl/admin/project_domain_attributes.proto b/docs/api/flyteidl/flyteidl/admin/project_domain_attributes.proto new file mode 100644 index 0000000000..b493ae1178 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/project_domain_attributes.proto @@ -0,0 +1,80 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/admin/matchable_resource.proto"; + +// Defines a set of custom matching attributes which defines resource defaults for a project and domain. +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message ProjectDomainAttributes { + // Unique project id for which this set of attributes will be applied. + string project = 1; + + // Unique domain id for which this set of attributes will be applied. + string domain = 2; + + MatchingAttributes matching_attributes = 3; + + // Optional, org key applied to the attributes. + string org = 4; +} + +// Sets custom attributes for a project-domain combination. +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message ProjectDomainAttributesUpdateRequest { + // +required + ProjectDomainAttributes attributes = 1; +} + +// Purposefully empty, may be populated in the future. +message ProjectDomainAttributesUpdateResponse { +} + +// Request to get an individual project domain attribute override. +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message ProjectDomainAttributesGetRequest { + // Unique project id which this set of attributes references. + // +required + string project = 1; + + // Unique domain id which this set of attributes references. + // +required + string domain = 2; + + // Which type of matchable attributes to return. 
+ // +required + MatchableResource resource_type = 3; + + // Optional, org key applied to the attributes. + string org = 4; +} + +// Response to get an individual project domain attribute override. +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message ProjectDomainAttributesGetResponse { + ProjectDomainAttributes attributes = 1; +} + +// Request to delete a set matchable project domain attribute override. +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message ProjectDomainAttributesDeleteRequest { + // Unique project id which this set of attributes references. + // +required + string project = 1; + + // Unique domain id which this set of attributes references. + // +required + string domain = 2; + + // Which type of matchable attributes to delete. + // +required + MatchableResource resource_type = 3; + + // Optional, org key applied to the attributes. + string org = 4; +} + +// Purposefully empty, may be populated in the future. +message ProjectDomainAttributesDeleteResponse { +} diff --git a/docs/api/flyteidl/flyteidl/admin/schedule.proto b/docs/api/flyteidl/flyteidl/admin/schedule.proto new file mode 100644 index 0000000000..6bcbd90140 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/schedule.proto @@ -0,0 +1,43 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +// Represents a frequency at which to run a schedule. +enum FixedRateUnit { + MINUTE = 0; + HOUR = 1; + DAY = 2; +} + +// Option for schedules run at a certain frequency e.g. every 2 minutes. +message FixedRate { + uint32 value = 1; + FixedRateUnit unit = 2; +} + +// Options for schedules to run according to a cron expression. 
+message CronSchedule { + // Standard/default cron implementation as described by https://en.wikipedia.org/wiki/Cron#CRON_expression; + // Also supports nonstandard predefined scheduling definitions + // as described by https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html#CronExpressions + // except @reboot + string schedule = 1; + // ISO 8601 duration as described by https://en.wikipedia.org/wiki/ISO_8601#Durations + string offset = 2; +} + +// Defines complete set of information required to trigger an execution on a schedule. +message Schedule { + + oneof ScheduleExpression { + // Uses AWS syntax: Minutes Hours Day-of-month Month Day-of-week Year + // e.g. for a schedule that runs every 15 minutes: 0/15 * * * ? * + string cron_expression = 1 [deprecated=true]; + FixedRate rate = 2; + CronSchedule cron_schedule = 4; + } + + // Name of the input variable that the kickoff time will be supplied to when the workflow is kicked off. + string kickoff_time_input_arg = 3; +} diff --git a/docs/api/flyteidl/flyteidl/admin/signal.proto b/docs/api/flyteidl/flyteidl/admin/signal.proto new file mode 100644 index 0000000000..39ff5c09b9 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/signal.proto @@ -0,0 +1,86 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/admin/common.proto"; +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/literals.proto"; +import "flyteidl/core/types.proto"; + +// SignalGetOrCreateRequest represents a request structure to retrieve or create a signal. +// See :ref:`ref_flyteidl.admin.Signal` for more details +message SignalGetOrCreateRequest { + // A unique identifier for the requested signal. + core.SignalIdentifier id = 1; + + // A type denoting the required value type for this signal. + core.LiteralType type = 2; +} + +// SignalListRequest represents a request structure to retrieve a collection of signals. 
+// See :ref:`ref_flyteidl.admin.Signal` for more details +message SignalListRequest { + // Indicates the workflow execution to filter by. + // +required + core.WorkflowExecutionIdentifier workflow_execution_id = 1; + + // Indicates the number of resources to be returned. + // +required + uint32 limit = 2; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. + // +optional + string token = 3; + + // Indicates a list of filters passed as string. + // +optional + string filters = 4; + + // Sort ordering. + // +optional + Sort sort_by = 5; +} + +// SignalList represents collection of signals along with the token of the last result. +// See :ref:`ref_flyteidl.admin.Signal` for more details +message SignalList { + // A list of signals matching the input filters. + repeated Signal signals = 1; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + string token = 2; +} + +// SignalSetRequest represents a request structure to set the value on a signal. Setting a signal +// effectively satisfies the signal condition within a Flyte workflow. +// See :ref:`ref_flyteidl.admin.Signal` for more details +message SignalSetRequest { + // A unique identifier for the requested signal. + core.SignalIdentifier id = 1; + + // The value of this signal, must match the defining signal type. + core.Literal value = 2; +} + +// SignalSetResponse represents a response structure if signal setting succeeds. +message SignalSetResponse { + // Purposefully empty, may be populated in the future. +} + +// Signal encapsulates a unique identifier, associated metadata, and a value for a single Flyte +// signal. Signals may exist either without a set value (representing a signal request) or with a +// populated value (indicating the signal has been given). 
+message Signal { + // A unique identifier for the requested signal. + core.SignalIdentifier id = 1; + + // A type denoting the required value type for this signal. + core.LiteralType type = 2; + + // The value of the signal. This is only available if the signal has been "set" and must match + // the defined the type. + core.Literal value = 3; +} diff --git a/docs/api/flyteidl/flyteidl/admin/task.proto b/docs/api/flyteidl/flyteidl/admin/task.proto new file mode 100644 index 0000000000..78fbba39f8 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/task.proto @@ -0,0 +1,71 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/tasks.proto"; +import "flyteidl/core/compiler.proto"; +import "flyteidl/admin/description_entity.proto"; +import "google/protobuf/timestamp.proto"; + +// Represents a request structure to create a revision of a task. +// See :ref:`ref_flyteidl.admin.Task` for more details +message TaskCreateRequest { + // id represents the unique identifier of the task. + // +required + core.Identifier id = 1; + + // Represents the specification for task. + // +required + TaskSpec spec = 2; +} + +// Represents a response structure if task creation succeeds. +message TaskCreateResponse { + // Purposefully empty, may be populated in the future. +} + +// Flyte workflows are composed of many ordered tasks. That is small, reusable, self-contained logical blocks +// arranged to process workflow inputs and produce a deterministic set of outputs. +// Tasks can come in many varieties tuned for specialized behavior. +message Task { + // id represents the unique identifier of the task. + core.Identifier id = 1; + + // closure encapsulates all the fields that maps to a compiled version of the task. + TaskClosure closure = 2; + + // One-liner overview of the entity. 
+ string short_description = 3; +} + +// Represents a list of tasks returned from the admin. +// See :ref:`ref_flyteidl.admin.Task` for more details +message TaskList { + // A list of tasks returned based on the request. + repeated Task tasks = 1; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + string token = 2; +} + +// Represents a structure that encapsulates the user-configured specification of the task. +message TaskSpec { + // Template of the task that encapsulates all the metadata of the task. + core.TaskTemplate template = 1; + + // Represents the specification for description entity. + DescriptionEntity description = 2; +} + +// Compute task attributes which include values derived from the TaskSpec, as well as plugin-specific data +// and task metadata. +message TaskClosure { + // Represents the compiled representation of the task from the specification provided. + core.CompiledTask compiled_task = 1; + + // Time at which the task was created. + google.protobuf.Timestamp created_at = 2; +} diff --git a/docs/api/flyteidl/flyteidl/admin/task_execution.proto b/docs/api/flyteidl/flyteidl/admin/task_execution.proto new file mode 100644 index 0000000000..54d2ff1e61 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/task_execution.proto @@ -0,0 +1,168 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/admin/common.proto"; +import "flyteidl/core/execution.proto"; +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/literals.proto"; +import "flyteidl/event/event.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/struct.proto"; + +// A message used to fetch a single task execution entity. 
+// See :ref:`ref_flyteidl.admin.TaskExecution` for more details +message TaskExecutionGetRequest { + // Unique identifier for the task execution. + // +required + core.TaskExecutionIdentifier id = 1; +} + +// Represents a request structure to retrieve a list of task execution entities yielded by a specific node execution. +// See :ref:`ref_flyteidl.admin.TaskExecution` for more details +message TaskExecutionListRequest { + // Indicates the node execution to filter by. + // +required + core.NodeExecutionIdentifier node_execution_id = 1; + + // Indicates the number of resources to be returned. + // +required + uint32 limit = 2; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. + // +optional + string token = 3; + + // Indicates a list of filters passed as string. + // More info on constructing filters : + // +optional + string filters = 4; + + // Sort ordering for returned list. + // +optional + Sort sort_by = 5; +} + +// Encapsulates all details for a single task execution entity. +// A task execution represents an instantiated task, including all inputs and additional +// metadata as well as computed results included state, outputs, and duration-based attributes. +message TaskExecution { + // Unique identifier for the task execution. + core.TaskExecutionIdentifier id = 1; + + // Path to remote data store where input blob is stored. + string input_uri = 2; + + // Task execution details and results. + TaskExecutionClosure closure = 3; + + // Whether this task spawned nodes. + bool is_parent = 4; +} + +// Response structure for a query to list of task execution entities. +// See :ref:`ref_flyteidl.admin.TaskExecution` for more details +message TaskExecutionList { + repeated TaskExecution task_executions = 1; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. 
+ string token = 2; +} + +// Container for task execution details and results. +message TaskExecutionClosure { + oneof output_result { + // Path to remote data store where output blob is stored if the execution succeeded (and produced outputs). + // DEPRECATED. Use GetTaskExecutionData to fetch output data instead. + string output_uri = 1 [deprecated = true]; + + // Error information for the task execution. Populated if the execution failed. + core.ExecutionError error = 2; + + // Raw output data produced by this task execution. + // DEPRECATED. Use GetTaskExecutionData to fetch output data instead. + core.LiteralMap output_data = 12 [deprecated = true]; + } + + // The last recorded phase for this task execution. + core.TaskExecution.Phase phase = 3; + + // Detailed log information output by the task execution. + repeated core.TaskLog logs = 4; + + // Time at which the task execution began running. + google.protobuf.Timestamp started_at = 5; + + // The amount of time the task execution spent running. + google.protobuf.Duration duration = 6; + + // Time at which the task execution was created. + google.protobuf.Timestamp created_at = 7; + + // Time at which the task execution was last updated. + google.protobuf.Timestamp updated_at = 8; + + // Custom data specific to the task plugin. + google.protobuf.Struct custom_info = 9; + + // If there is an explanation for the most recent phase transition, the reason will capture it. + string reason = 10; + + // A predefined yet extensible Task type identifier. + string task_type = 11; + + // Metadata around how a task was executed. + event.TaskExecutionMetadata metadata = 16; + + // The event version is used to indicate versioned changes in how data is maintained using this + // proto message. For example, event_verison > 0 means that maps tasks logs use the + // TaskExecutionMetadata ExternalResourceInfo fields for each subtask rather than the TaskLog + // in this message. 
+ int32 event_version = 17; + + // A time-series of the phase transition or update explanations. This, when compared to storing a singular reason + // as previously done, is much more valuable in visualizing and understanding historical evaluations. + repeated Reason reasons = 18; +} + +// Reason is a single message annotated with a timestamp to indicate the instant the reason occurred. +message Reason { + // occurred_at is the timestamp indicating the instant that this reason happened. + google.protobuf.Timestamp occurred_at = 1; + + // message is the explanation for the most recent phase transition or status update. + string message = 2; +} + +// Request structure to fetch inputs and output for a task execution. +// By default this data is not returned inline in :ref:`ref_flyteidl.admin.TaskExecutionGetRequest` +message TaskExecutionGetDataRequest { + // The identifier of the task execution for which to fetch inputs and outputs. + // +required + core.TaskExecutionIdentifier id = 1; +} + +// Response structure for TaskExecutionGetDataRequest which contains inputs and outputs for a task execution. +message TaskExecutionGetDataResponse { + // Signed url to fetch a core.LiteralMap of task execution inputs. + // Deprecated: Please use full_inputs instead. + UrlBlob inputs = 1 [deprecated = true]; + + // Signed url to fetch a core.LiteralMap of task execution outputs. + // Deprecated: Please use full_outputs instead. + UrlBlob outputs = 2 [deprecated = true]; + + // Full_inputs will only be populated if they are under a configured size threshold. + core.LiteralMap full_inputs = 3; + + // Full_outputs will only be populated if they are under a configured size threshold. 
+ core.LiteralMap full_outputs = 4; + + // flyte tiny url to fetch a core.LiteralMap of task execution's IO + // Deck will be empty for task + FlyteURLs flyte_urls = 5; +} diff --git a/docs/api/flyteidl/flyteidl/admin/version.proto b/docs/api/flyteidl/flyteidl/admin/version.proto new file mode 100644 index 0000000000..e0e38bda1f --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/version.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +// Response for the GetVersion API +message GetVersionResponse { + // The control plane version information. FlyteAdmin and related components + // form the control plane of Flyte + Version control_plane_version = 1; +} + +// Provides Version information for a component +message Version { + // Specifies the GIT sha of the build + string Build = 1; + + // Version for the build, should follow a semver + string Version = 2; + + // Build timestamp + string BuildTime = 3; +} + +// Empty request for GetVersion +message GetVersionRequest { +} diff --git a/docs/api/flyteidl/flyteidl/admin/workflow.proto b/docs/api/flyteidl/flyteidl/admin/workflow.proto new file mode 100644 index 0000000000..d522d65b73 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/workflow.proto @@ -0,0 +1,92 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/core/compiler.proto"; +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/workflow.proto"; +import "flyteidl/admin/description_entity.proto"; +import "google/protobuf/timestamp.proto"; + +// Represents a request structure to create a revision of a workflow. +// See :ref:`ref_flyteidl.admin.Workflow` for more details +message WorkflowCreateRequest { + // id represents the unique identifier of the workflow. 
+ // +required + core.Identifier id = 1; + + // Represents the specification for workflow. + // +required + WorkflowSpec spec = 2; +} + +message WorkflowCreateResponse { + // Purposefully empty, may be populated in the future. +} + +// Represents the workflow structure stored in the Admin +// A workflow is created by ordering tasks and associating outputs to inputs +// in order to produce a directed-acyclic execution graph. +message Workflow { + // id represents the unique identifier of the workflow. + core.Identifier id = 1; + + // closure encapsulates all the fields that maps to a compiled version of the workflow. + WorkflowClosure closure = 2; + + // One-liner overview of the entity. + string short_description = 3; +} + +// Represents a list of workflows returned from the admin. +// See :ref:`ref_flyteidl.admin.Workflow` for more details +message WorkflowList { + // A list of workflows returned based on the request. + repeated Workflow workflows = 1; + + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + string token = 2; +} + +// Represents a structure that encapsulates the specification of the workflow. +message WorkflowSpec { + // Template of the task that encapsulates all the metadata of the workflow. + core.WorkflowTemplate template = 1; + + // Workflows that are embedded into other workflows need to be passed alongside the parent workflow to the + // propeller compiler (since the compiler doesn't have any knowledge of other workflows - ie, it doesn't reach out + // to Admin to see other registered workflows). In fact, subworkflows do not even need to be registered. + repeated core.WorkflowTemplate sub_workflows = 2; + + // Represents the specification for description entity. + DescriptionEntity description = 3; +} + +// A container holding the compiled workflow produced from the WorkflowSpec and additional metadata. 
+message WorkflowClosure { + // Represents the compiled representation of the workflow from the specification provided. + core.CompiledWorkflowClosure compiled_workflow = 1; + + // Time at which the workflow was created. + google.protobuf.Timestamp created_at = 2; +} + +// The workflow id is already used and the structure is different +message WorkflowErrorExistsDifferentStructure { + core.Identifier id = 1; +} + +// The workflow id is already used with an identical sctructure +message WorkflowErrorExistsIdenticalStructure { + core.Identifier id = 1; +} + +// When a CreateWorkflowRequest fails due to matching id +message CreateWorkflowFailureReason { + oneof reason { + WorkflowErrorExistsDifferentStructure exists_different_structure = 1; + WorkflowErrorExistsIdenticalStructure exists_identical_structure = 2; + } +} diff --git a/docs/api/flyteidl/flyteidl/admin/workflow_attributes.proto b/docs/api/flyteidl/flyteidl/admin/workflow_attributes.proto new file mode 100644 index 0000000000..9767f00df7 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/admin/workflow_attributes.proto @@ -0,0 +1,89 @@ +syntax = "proto3"; + +package flyteidl.admin; +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; + +import "flyteidl/admin/matchable_resource.proto"; + +// Defines a set of custom matching attributes which defines resource defaults for a project, domain and workflow. +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message WorkflowAttributes { + // Unique project id for which this set of attributes will be applied. + string project = 1; + + // Unique domain id for which this set of attributes will be applied. + string domain = 2; + + // Workflow name for which this set of attributes will be applied. + string workflow = 3; + + MatchingAttributes matching_attributes = 4; + + // Optional, org key applied to the attributes. 
+ string org = 5; +} + +// Sets custom attributes for a project, domain and workflow combination. +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message WorkflowAttributesUpdateRequest { + WorkflowAttributes attributes = 1; +} + +// Purposefully empty, may be populated in the future. +message WorkflowAttributesUpdateResponse { +} + +// Request to get an individual workflow attribute override. +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message WorkflowAttributesGetRequest { + // Unique project id which this set of attributes references. + // +required + string project = 1; + + // Unique domain id which this set of attributes references. + // +required + string domain = 2; + + // Workflow name which this set of attributes references. + // +required + string workflow = 3; + + // Which type of matchable attributes to return. + // +required + MatchableResource resource_type = 4; + + // Optional, org key applied to the attributes. + string org = 5; +} + +// Response to get an individual workflow attribute override. +message WorkflowAttributesGetResponse { + WorkflowAttributes attributes = 1; +} + +// Request to delete a set matchable workflow attribute override. +// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` +message WorkflowAttributesDeleteRequest { + // Unique project id which this set of attributes references. + // +required + string project = 1; + + // Unique domain id which this set of attributes references. + // +required + string domain = 2; + + // Workflow name which this set of attributes references. + // +required + string workflow = 3; + + // Which type of matchable attributes to delete. + // +required + MatchableResource resource_type = 4; + + // Optional, org key applied to the attributes. + string org = 5; +} + +// Purposefully empty, may be populated in the future. 
+message WorkflowAttributesDeleteResponse { +} diff --git a/docs/api/flyteidl/flyteidl/cacheservice/cacheservice.proto b/docs/api/flyteidl/flyteidl/cacheservice/cacheservice.proto new file mode 100644 index 0000000000..c85e2eb55c --- /dev/null +++ b/docs/api/flyteidl/flyteidl/cacheservice/cacheservice.proto @@ -0,0 +1,143 @@ +syntax = "proto3"; + +package flyteidl.cacheservice; + +import "flyteidl/core/literals.proto"; +import "flyteidl/core/types.proto"; +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/interface.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/cacheservice"; + +/* + * CacheService defines operations for cache management including retrieval, storage, and deletion of cached task/workflow outputs. + */ +service CacheService { + // Retrieves cached data by key. + rpc Get (GetCacheRequest) returns (GetCacheResponse); + + // Stores or updates cached data by key. + rpc Put (PutCacheRequest) returns (PutCacheResponse); + + // Deletes cached data by key. + rpc Delete (DeleteCacheRequest) returns (DeleteCacheResponse); + + // Get or extend a reservation for a cache key + rpc GetOrExtendReservation (GetOrExtendReservationRequest) returns (GetOrExtendReservationResponse); + + // Release the reservation for a cache key + rpc ReleaseReservation (ReleaseReservationRequest) returns (ReleaseReservationResponse); +} + +/* + * Additional metadata as key-value pairs + */ +message KeyMapMetadata { + map values = 1; // Additional metadata as key-value pairs +} + +/* + * Metadata for cached outputs, including the source identifier and timestamps. 
+ */ +message Metadata { + core.Identifier source_identifier = 1; // Source task or workflow identifier + KeyMapMetadata key_map = 2; // Additional metadata as key-value pairs + google.protobuf.Timestamp created_at = 3; // Creation timestamp + google.protobuf.Timestamp last_updated_at = 4; // Last update timestamp +} + +/* + * Represents cached output, either as literals or an URI, with associated metadata. + */ +message CachedOutput { + oneof output { + flyteidl.core.LiteralMap output_literals = 1; // Output literals + string output_uri = 2; // URI to output data + } + Metadata metadata = 3; // Associated metadata +} + +/* + * Request to retrieve cached data by key. + */ +message GetCacheRequest { + string key = 1; // Cache key +} + +/* + * Response with cached data for a given key. + */ +message GetCacheResponse { + CachedOutput output = 1; // Cached output +} + +/* + * Request to store/update cached data by key. + */ +message PutCacheRequest { + string key = 1; // Cache key + CachedOutput output = 2; // Output to cache + bool overwrite = 3; // Overwrite flag +} + +/* + * Response message of cache store/update operation. + */ +message PutCacheResponse { + // Empty, success indicated by no errors +} + +/* + * Request to delete cached data by key. + */ +message DeleteCacheRequest { + string key = 1; // Cache key +} + +/* + * Response message of cache deletion operation. + */ +message DeleteCacheResponse { + // Empty, success indicated by no errors +} + +// A reservation including owner, heartbeat interval, expiration timestamp, and various metadata. 
+message Reservation { + string key = 1; // The unique ID for the reservation - same as the cache key + string owner_id = 2; // The unique ID of the owner for the reservation + google.protobuf.Duration heartbeat_interval = 3; // Requested reservation extension heartbeat interval + google.protobuf.Timestamp expires_at = 4; // Expiration timestamp of this reservation +} + +/* + * Request to get or extend a reservation for a cache key + */ +message GetOrExtendReservationRequest { + string key = 1; // The unique ID for the reservation - same as the cache key + string owner_id = 2; // The unique ID of the owner for the reservation + google.protobuf.Duration heartbeat_interval = 3; // Requested reservation extension heartbeat interval +} + +/* + * Request to get or extend a reservation for a cache key + */ +message GetOrExtendReservationResponse { + Reservation reservation = 1; // The reservation that was created or extended +} + +/* + * Request to release the reservation for a cache key + */ +message ReleaseReservationRequest { + string key = 1; // The unique ID for the reservation - same as the cache key + string owner_id = 2; // The unique ID of the owner for the reservation +} + +/* + * Response message of release reservation operation. + */ +message ReleaseReservationResponse { + // Empty, success indicated by no errors +} \ No newline at end of file diff --git a/docs/api/flyteidl/flyteidl/core/artifact_id.proto b/docs/api/flyteidl/flyteidl/core/artifact_id.proto new file mode 100644 index 0000000000..022bc20cff --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/artifact_id.proto @@ -0,0 +1,112 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +import "google/protobuf/timestamp.proto"; +import "flyteidl/core/identifier.proto"; + + +message ArtifactKey { + // Project and domain and suffix needs to be unique across a given artifact store. 
+ string project = 1; + string domain = 2; + string name = 3; + string org = 4; +} + +// Only valid for triggers +message ArtifactBindingData { + reserved 1 to 4; + // These two fields are only relevant in the partition value case + oneof partition_data { + string partition_key = 5; + bool bind_to_time_partition = 6; + } + + // This is only relevant in the time partition case + TimeTransform time_transform = 7; +} + +enum Granularity { + UNSET = 0; + MINUTE = 1; + HOUR = 2; + DAY = 3; // default + MONTH = 4; +} + +enum Operator { + MINUS = 0; + PLUS = 1; +} + +message TimeTransform { + string transform = 1; + Operator op = 2; +} + +message InputBindingData { + string var = 1; +} + +message RuntimeBinding {} + +message LabelValue { + oneof value { + // The string static value is for use in the Partitions object + string static_value = 1; + + // The time value is for use in the TimePartition case + google.protobuf.Timestamp time_value = 2; + ArtifactBindingData triggered_binding = 3; + InputBindingData input_binding = 4; + RuntimeBinding runtime_binding = 5; + } +} + +message Partitions { + map value = 1; +} + +message TimePartition { + LabelValue value = 1; + Granularity granularity = 2; +} + +message ArtifactID { + ArtifactKey artifact_key = 1; + + string version = 2; + + // Think of a partition as a tag on an Artifact, except it's a key-value pair. + // Different partitions naturally have different versions (execution ids). + Partitions partitions = 3; + + // There is no such thing as an empty time partition - if it's not set, then there is no time partition. 
+ TimePartition time_partition = 4; +} + +message ArtifactTag { + ArtifactKey artifact_key = 1; + + LabelValue value = 2; +} + +// Uniqueness constraints for Artifacts +// - project, domain, name, version, partitions +// Option 2 (tags are standalone, point to an individual artifact id): +// - project, domain, name, alias (points to one partition if partitioned) +// - project, domain, name, partition key, partition value +message ArtifactQuery { + oneof identifier { + ArtifactID artifact_id = 1; + ArtifactTag artifact_tag = 2; + string uri = 3; + + // This is used in the trigger case, where a user specifies a value for an input that is one of the triggering + // artifacts, or a partition value derived from a triggering artifact. + ArtifactBindingData binding = 4; + } +} diff --git a/docs/api/flyteidl/flyteidl/core/catalog.proto b/docs/api/flyteidl/flyteidl/core/catalog.proto new file mode 100644 index 0000000000..4d98c28d7e --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/catalog.proto @@ -0,0 +1,63 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +import "flyteidl/core/identifier.proto"; + +// Indicates the status of CatalogCaching. 
The reason this is not embedded in TaskNodeMetadata is that we may use it for other types of nodes as well in the future
+ enum Status { + // Used to indicate that reservations are disabled + RESERVATION_DISABLED = 0; + // Used to indicate that a reservation was successfully acquired or extended + RESERVATION_ACQUIRED = 1; + // Used to indicate that an active reservation currently exists + RESERVATION_EXISTS = 2; + // Used to indicate that the reservation has been successfully released + RESERVATION_RELEASED = 3; + // Used to indicate that a reservation operation resulted in failure + RESERVATION_FAILURE = 4; + } +} diff --git a/docs/api/flyteidl/flyteidl/core/compiler.proto b/docs/api/flyteidl/flyteidl/core/compiler.proto new file mode 100644 index 0000000000..620ee26f2d --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/compiler.proto @@ -0,0 +1,64 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/interface.proto"; +import "flyteidl/core/workflow.proto"; +import "flyteidl/core/tasks.proto"; + +// Adjacency list for the workflow. This is created as part of the compilation process. Every process after the compilation +// step uses this created ConnectionSet +message ConnectionSet { + message IdList { + repeated string ids = 1; + } + + // A list of all the node ids that are downstream from a given node id + map downstream = 7; + + // A list of all the node ids, that are upstream of this node id + map upstream = 8; +} + +// Output of the compilation Step. This object represents one workflow. We store more metadata at this layer +message CompiledWorkflow { + // Completely contained Workflow Template + WorkflowTemplate template = 1; + // For internal use only! This field is used by the system and must not be filled in. Any values set will be ignored. + ConnectionSet connections = 2; +} + +// Output of the compilation step. This object represents one LaunchPlan. 
We store more metadata at this layer +message CompiledLaunchPlan { + // Completely contained LaunchPlan Template + LaunchPlanTemplate template = 1; +} + +// Output of the Compilation step. This object represent one Task. We store more metadata at this layer +message CompiledTask { + // Completely contained TaskTemplate + TaskTemplate template = 1; +} + +// A Compiled Workflow Closure contains all the information required to start a new execution, or to visualize a workflow +// and its details. The CompiledWorkflowClosure should always contain a primary workflow, that is the main workflow that +// will being the execution. All subworkflows are denormalized. WorkflowNodes refer to the workflow identifiers of +// compiled subworkflows. +message CompiledWorkflowClosure { + //+required + CompiledWorkflow primary = 1; + // Guaranteed that there will only exist one and only one workflow with a given id, i.e., every sub workflow has a + // unique identifier. Also every enclosed subworkflow is used either by a primary workflow or by a subworkflow + // as an inlined workflow + //+optional + repeated CompiledWorkflow sub_workflows = 2; + // Guaranteed that there will only exist one and only one task with a given id, i.e., every task has a unique id + //+required (at least 1) + repeated CompiledTask tasks = 3; + // A collection of launch plans that are compiled. Guaranteed that there will only exist one and only one launch plan + // with a given id, i.e., every launch plan has a unique id. 
+ repeated CompiledLaunchPlan launch_plans = 4; +} diff --git a/docs/api/flyteidl/flyteidl/core/condition.proto b/docs/api/flyteidl/flyteidl/core/condition.proto new file mode 100644 index 0000000000..84c7fb0314 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/condition.proto @@ -0,0 +1,63 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +import "flyteidl/core/literals.proto"; + +// Defines a 2-level tree where the root is a comparison operator and Operands are primitives or known variables. +// Each expression results in a boolean result. +message ComparisonExpression { + // Binary Operator for each expression + enum Operator { + EQ = 0; + NEQ = 1; + // Greater Than + GT = 2; + GTE = 3; + // Less Than + LT = 4; + LTE = 5; + } + + Operator operator = 1; + Operand left_value = 2; + Operand right_value = 3; +} + +// Defines an operand to a comparison expression. +message Operand { + oneof val { + // Can be a constant + core.Primitive primitive = 1 [deprecated = true]; + // Or one of this node's input variables + string var = 2; + // Replace the primitive field + core.Scalar scalar = 3; + } +} + +// Defines a boolean expression tree. It can be a simple or a conjunction expression. +// Multiple expressions can be combined using a conjunction or a disjunction to result in a final boolean result. +message BooleanExpression { + oneof expr { + ConjunctionExpression conjunction = 1; + ComparisonExpression comparison = 2; + } +} + +// Defines a conjunction expression of two boolean expressions. +message ConjunctionExpression { + // Nested conditions. 
They can be conjoined using AND / OR + // Order of evaluation is not important as the operators are Commutative + enum LogicalOperator { + // Conjunction + AND = 0; + OR = 1; + } + + LogicalOperator operator = 1; + BooleanExpression left_expression = 2; + BooleanExpression right_expression = 3; +} diff --git a/docs/api/flyteidl/flyteidl/core/dynamic_job.proto b/docs/api/flyteidl/flyteidl/core/dynamic_job.proto new file mode 100644 index 0000000000..1665f5fa29 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/dynamic_job.proto @@ -0,0 +1,32 @@ +syntax = "proto3"; + +import "flyteidl/core/tasks.proto"; +import "flyteidl/core/workflow.proto"; +import "flyteidl/core/literals.proto"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +// Describes a set of tasks to execute and how the final outputs are produced. +message DynamicJobSpec { + // A collection of nodes to execute. + repeated Node nodes = 1; + + // An absolute number of successful completions of nodes required to mark this job as succeeded. As soon as this + // criteria is met, the dynamic job will be marked as successful and outputs will be computed. If this number + // becomes impossible to reach (e.g. number of currently running tasks + number of already succeeded tasks < + // min_successes) the task will be aborted immediately and marked as failed. The default value of this field, if not + // specified, is the count of nodes repeated field. + int64 min_successes = 2; + + // Describes how to bind the final output of the dynamic job from the outputs of executed nodes. The referenced ids + // in bindings should have the generated id for the subtask. + repeated Binding outputs = 3; + + // [Optional] A complete list of task specs referenced in nodes. + repeated TaskTemplate tasks = 4; + + // [Optional] A complete list of task specs referenced in nodes. 
+ repeated WorkflowTemplate subworkflows = 5; +} diff --git a/docs/api/flyteidl/flyteidl/core/errors.proto b/docs/api/flyteidl/flyteidl/core/errors.proto new file mode 100644 index 0000000000..4d25389349 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/errors.proto @@ -0,0 +1,35 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +import "flyteidl/core/execution.proto"; + +// Error message to propagate detailed errors from container executions to the execution +// engine. +message ContainerError { + // A simplified code for errors, so that we can provide a glossary of all possible errors. + string code = 1; + // A detailed error message. + string message = 2; + + // Defines a generic error type that dictates the behavior of the retry strategy. + enum Kind { + NON_RECOVERABLE = 0; + RECOVERABLE = 1; + } + + // An abstract error kind for this error. Defaults to Non_Recoverable if not specified. + Kind kind = 3; + + // Defines the origin of the error (system, user, unknown). + ExecutionError.ErrorKind origin = 4; +} + +// Defines the errors.pb file format the container can produce to communicate +// failure reasons to the execution engine. +message ErrorDocument { + // The error raised during execution. 
+ ContainerError error = 1; +} diff --git a/docs/api/flyteidl/flyteidl/core/execution.proto b/docs/api/flyteidl/flyteidl/core/execution.proto new file mode 100644 index 0000000000..4d55198955 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/execution.proto @@ -0,0 +1,118 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +import "google/protobuf/duration.proto"; + +// Indicates various phases of Workflow Execution +message WorkflowExecution { + enum Phase { + UNDEFINED = 0; + QUEUED = 1; + RUNNING = 2; + SUCCEEDING = 3; + SUCCEEDED = 4; + FAILING = 5; + FAILED = 6; + ABORTED = 7; + TIMED_OUT = 8; + ABORTING = 9; + } +} + +// Indicates various phases of Node Execution that only include the time spent to run the nodes/workflows +message NodeExecution { + enum Phase { + UNDEFINED = 0; + QUEUED = 1; + RUNNING = 2; + SUCCEEDED = 3; + FAILING = 4; + FAILED = 5; + ABORTED = 6; + SKIPPED = 7; + TIMED_OUT = 8; + DYNAMIC_RUNNING = 9; + RECOVERED = 10; + } +} + +// Phases that task plugins can go through. Not all phases may be applicable to a specific plugin task, +// but this is the cumulative list that customers may want to know about for their task. +message TaskExecution{ + enum Phase { + UNDEFINED = 0; + QUEUED = 1; + RUNNING = 2; + SUCCEEDED = 3; + ABORTED = 4; + FAILED = 5; + // To indicate cases where task is initializing, like: ErrImagePull, ContainerCreating, PodInitializing + INITIALIZING = 6; + // To address cases, where underlying resource is not available: Backoff error, Resource quota exceeded + WAITING_FOR_RESOURCES = 7; + } +} + + +// Represents the error message from the execution. +message ExecutionError { + // Error code indicates a grouping of a type of error. + // More Info: + string code = 1; + // Detailed description of the error - including stack trace. 
+ string message = 2; + // Full error contents accessible via a URI + string error_uri = 3; + // Error type: System or User + enum ErrorKind { + UNKNOWN = 0; + USER = 1; + SYSTEM = 2; + } + ErrorKind kind = 4; +} + +// Log information for the task that is specific to a log sink +// When our log story is flushed out, we may have more metadata here like log link expiry +message TaskLog { + + enum MessageFormat { + UNKNOWN = 0; + CSV = 1; + JSON = 2; + } + + string uri = 1; + string name = 2; + MessageFormat message_format = 3; + google.protobuf.Duration ttl = 4; + bool ShowWhilePending = 5; + bool HideOnceFinished = 6; +} + +// Represents customized execution run-time attributes. +message QualityOfServiceSpec { + // Indicates how much queueing delay an execution can tolerate. + google.protobuf.Duration queueing_budget = 1; + + // Add future, user-configurable options here +} + +// Indicates the priority of an execution. +message QualityOfService { + enum Tier { + // Default: no quality of service specified. + UNDEFINED = 0; + HIGH = 1; + MEDIUM = 2; + LOW = 3; + } + + oneof designation { + Tier tier = 1; + QualityOfServiceSpec spec = 2; + } +} diff --git a/docs/api/flyteidl/flyteidl/core/execution_envs.proto b/docs/api/flyteidl/flyteidl/core/execution_envs.proto new file mode 100644 index 0000000000..d5da775f65 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/execution_envs.proto @@ -0,0 +1,45 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +import "google/protobuf/struct.proto"; + +// ExecutionEnvAssignment is a message that is used to assign an execution environment to a set of +// nodes. +message ExecutionEnvAssignment { + // node_ids is a list of node ids that are being assigned the execution environment. + repeated string node_ids = 1; + + // task_type is the type of task that is being assigned. 
This is used to override which Flyte + // plugin will be used during execution. + string task_type = 2; + + // execution_env is the environment that is being assigned to the nodes. + ExecutionEnv execution_env = 3; +} + +// ExecutionEnv is a message that is used to specify the execution environment. +message ExecutionEnv { + // name is a human-readable identifier for the execution environment. This is combined with the + // project, domain, and version to uniquely identify an execution environment. + string name = 1; + + // type is the type of the execution environment. + string type = 2; + + // environment is a oneof field that can be used to specify the environment in different ways. + oneof environment { + // extant is a reference to an existing environment. + google.protobuf.Struct extant = 3; + + // spec is a specification of the environment. + google.protobuf.Struct spec = 4; + } + + // version is the version of the execution environment. This may be used differently by each + // individual environment type (ex. auto-generated or manually provided), but is intended to + // allow variance in environment specifications with the same ID. + string version = 5; +} diff --git a/docs/api/flyteidl/flyteidl/core/identifier.proto b/docs/api/flyteidl/flyteidl/core/identifier.proto new file mode 100644 index 0000000000..50bf22429c --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/identifier.proto @@ -0,0 +1,80 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +// Indicates a resource type within Flyte. +enum ResourceType { + UNSPECIFIED = 0; + TASK = 1; + WORKFLOW = 2; + LAUNCH_PLAN = 3; + // A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. + // Eventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities make it possible for the UI and CLI to act on the objects + // in a similar manner to other Flyte objects + DATASET = 4; +} + +// Encapsulation of fields that uniquely identify a Flyte resource. +message Identifier { + // Identifies the specific type of resource that this identifier corresponds to. + core.ResourceType resource_type = 1; + + // Name of the project the resource belongs to. + string project = 2; + + // Name of the domain the resource belongs to. + // A domain can be considered as a subset within a specific project. + string domain = 3; + + // User provided value for the resource. + string name = 4; + + // Specific version of the resource. + string version = 5; + + // Optional, org key applied to the resource. + string org = 6; +} + +// Encapsulation of fields that uniquely identify a Flyte workflow execution +message WorkflowExecutionIdentifier { + // Name of the project the resource belongs to. + string project = 1; + + // Name of the domain the resource belongs to. + // A domain can be considered as a subset within a specific project. + string domain = 2; + + // User or system provided value for the resource. + string name = 4; + + // Optional, org key applied to the resource. + string org = 5; +} + +// Encapsulation of fields that identify a Flyte node execution entity. +message NodeExecutionIdentifier { + string node_id = 1; + + WorkflowExecutionIdentifier execution_id = 2; +} + +// Encapsulation of fields that identify a Flyte task execution entity. +message TaskExecutionIdentifier { + core.Identifier task_id = 1; + + core.NodeExecutionIdentifier node_execution_id = 2; + + uint32 retry_attempt = 3; +} + +// Encapsulation of fields that uniquely identify a signal. +message SignalIdentifier { + // Unique identifier for a signal. + string signal_id = 1; + + // Identifies the Flyte workflow execution this signal belongs to. 
+ WorkflowExecutionIdentifier execution_id = 2; +} diff --git a/docs/api/flyteidl/flyteidl/core/interface.proto b/docs/api/flyteidl/flyteidl/core/interface.proto new file mode 100644 index 0000000000..ec7673d9c4 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/interface.proto @@ -0,0 +1,64 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +import "flyteidl/core/types.proto"; +import "flyteidl/core/literals.proto"; +import "flyteidl/core/artifact_id.proto"; + +// Defines a strongly typed variable. +message Variable { + // Variable literal type. + LiteralType type = 1; + + //+optional string describing input variable + string description = 2; + + //+optional This object allows the user to specify how Artifacts are created. + // name, tag, partitions can be specified. The other fields (version and project/domain) are ignored. + core.ArtifactID artifact_partial_id = 3; + + core.ArtifactTag artifact_tag = 4; +} + +// A map of Variables +message VariableMap { + // Defines a map of variable names to variables. + map variables = 1; +} + +// Defines strongly typed inputs and outputs. +message TypedInterface { + VariableMap inputs = 1; + VariableMap outputs = 2; +} + +// A parameter is used as input to a launch plan and has +// the special ability to have a default value or mark itself as required. +message Parameter { + //+required Variable. Defines the type of the variable backing this parameter. + Variable var = 1; + + //+optional + oneof behavior { + // Defines a default value that has to match the variable type defined. + Literal default = 2; + + //+optional, is this value required to be filled. + bool required = 3; + + // This is an execution time search basically that should result in exactly one Artifact with a Type that + // matches the type of the variable. + core.ArtifactQuery artifact_query = 4; + + core.ArtifactID artifact_id = 5; + } +} + +// A map of Parameters. 
+message ParameterMap { + // Defines a map of parameter names to parameters. + map parameters = 1; +} diff --git a/docs/api/flyteidl/flyteidl/core/literals.proto b/docs/api/flyteidl/flyteidl/core/literals.proto new file mode 100644 index 0000000000..66e4821867 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/literals.proto @@ -0,0 +1,200 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +import "google/protobuf/timestamp.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/struct.proto"; +import "flyteidl/core/types.proto"; + +// Primitive Types +message Primitive { + // Defines one of simple primitive types. These types will get translated into different programming languages as + // described in https://developers.google.com/protocol-buffers/docs/proto#scalar. + oneof value { + int64 integer = 1; + double float_value = 2; + string string_value = 3; + bool boolean = 4; + google.protobuf.Timestamp datetime = 5; + google.protobuf.Duration duration = 6; + } +} + +// Used to denote a nil/null/None assignment to a scalar value. The underlying LiteralType for Void is intentionally +// undefined since it can be assigned to a scalar of any LiteralType. +message Void { +} + +// Refers to an offloaded set of files. It encapsulates the type of the store and a unique uri for where the data is. +// There are no restrictions on how the uri is formatted since it will depend on how to interact with the store. +message Blob { + BlobMetadata metadata = 1; + string uri = 3; +} + +message BlobMetadata { + BlobType type = 1; +} + +// A simple byte array with a tag to help different parts of the system communicate about what is in the byte array. +// It's strongly advisable that consumers of this type define a unique tag and validate the tag before parsing the data. 
+message Binary { + bytes value = 1; // Serialized data (MessagePack) for supported types like Dataclass, Pydantic BaseModel, and untyped dict. + string tag = 2; // The serialization format identifier (e.g., MessagePack). Consumers must define unique tags and validate them before deserialization. +} + +// A strongly typed schema that defines the interface of data retrieved from the underlying storage medium. +message Schema { + string uri = 1; + SchemaType type = 3; +} + +// The runtime representation of a tagged union value. See `UnionType` for more details. +message Union { + Literal value = 1; + LiteralType type = 2; +} + +message StructuredDatasetMetadata { + // Bundle the type information along with the literal. + // This is here because StructuredDatasets can often be more defined at run time than at compile time. + // That is, at compile time you might only declare a task to return a pandas dataframe or a StructuredDataset, + // without any column information, but at run time, you might have that column information. + // flytekit python will copy this type information into the literal, from the type information, if not provided by + // the various plugins (encoders). + // Since this field is run time generated, it's not used for any type checking. + StructuredDatasetType structured_dataset_type = 1; +} + +message StructuredDataset { + // String location uniquely identifying where the data is. + // Should start with the storage location (e.g. s3://, gs://, bq://, etc.) + string uri = 1; + + StructuredDatasetMetadata metadata = 2; +} + +message Scalar { + oneof value { + Primitive primitive = 1; + Blob blob = 2; + Binary binary = 3; + Schema schema = 4; + Void none_type = 5; + Error error = 6; + google.protobuf.Struct generic = 7; + StructuredDataset structured_dataset = 8; + Union union = 9; + } +} + +// A simple value. This supports any level of nesting (e.g. array of array of array of Blobs) as well as simple primitives. 
+message Literal { + reserved 6, 7; + oneof value { + // A simple value. + Scalar scalar = 1; + + // A collection of literals to allow nesting. + LiteralCollection collection = 2; + + // A map of strings to literals. + LiteralMap map = 3; + + // Offloaded literal metadata + // When you deserialize the offloaded metadata, it would be of Literal and its type would be defined by LiteralType stored in offloaded_metadata. + LiteralOffloadedMetadata offloaded_metadata = 8; + } + + // A hash representing this literal. + // This is used for caching purposes. For more details refer to RFC 1893 + // (https://github.com/flyteorg/flyte/blob/master/rfc/system/1893-caching-of-offloaded-objects.md) + string hash = 4; + + // Additional metadata for literals. + map metadata = 5; +} + +// A message that contains the metadata of the offloaded data. +message LiteralOffloadedMetadata { + // The location of the offloaded core.Literal. + string uri = 1; + + // The size of the offloaded data. + uint64 size_bytes = 2; + + // The inferred literal type of the offloaded data. + LiteralType inferred_type = 3; +} + +// A collection of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field. +message LiteralCollection { + repeated Literal literals = 1; +} + +// A map of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field. +message LiteralMap { + map literals = 1; +} + +// A collection of BindingData items. +message BindingDataCollection { + repeated BindingData bindings = 1; +} + +// A map of BindingData items. +message BindingDataMap { + map bindings = 1; +} + +message UnionInfo { + LiteralType targetType = 1; +} + +// Specifies either a simple value or a reference to another output. +message BindingData { + oneof value { + // A simple scalar value. + Scalar scalar = 1; + + // A collection of binding data. This allows nesting of binding data to any number + // of levels. 
+ BindingDataCollection collection = 2; + + // References an output promised by another node. + OutputReference promise = 3; + + // A map of bindings. The key is always a string. + BindingDataMap map = 4; + } + + UnionInfo union = 5; +} + +// An input/output binding of a variable to either static value or a node output. +message Binding { + // Variable name must match an input/output variable of the node. + string var = 1; + + // Data to use to bind this variable. + BindingData binding = 2; +} + +// A generic key value pair. +message KeyValuePair { + //required. + string key = 1; + + //+optional. + string value = 2; +} + +// Retry strategy associated with an executable unit. +message RetryStrategy { + // Number of retries. Retries will be consumed when the job fails with a recoverable error. + // The number of retries must be less than or equals to 10. + uint32 retries = 5; +} diff --git a/docs/api/flyteidl/flyteidl/core/metrics.proto b/docs/api/flyteidl/flyteidl/core/metrics.proto new file mode 100644 index 0000000000..5244ff4873 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/metrics.proto @@ -0,0 +1,50 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +import "flyteidl/core/identifier.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/struct.proto"; + +// Span represents a duration trace of Flyte execution. The id field denotes a Flyte execution entity or an operation +// which uniquely identifies the Span. The spans attribute allows this Span to be further broken down into more +// precise definitions. +message Span { + // start_time defines the instance this span began. + google.protobuf.Timestamp start_time = 1; + + // end_time defines the instance this span completed. + google.protobuf.Timestamp end_time = 2; + + oneof id { + // workflow_id is the id of the workflow execution this Span represents. 
+ flyteidl.core.WorkflowExecutionIdentifier workflow_id = 3; + + // node_id is the id of the node execution this Span represents. + flyteidl.core.NodeExecutionIdentifier node_id = 4; + + // task_id is the id of the task execution this Span represents. + flyteidl.core.TaskExecutionIdentifier task_id = 5; + + // operation_id is the id of a unique operation that this Span represents. + string operation_id = 6; + } + + // spans defines a collection of Spans that breakdown this execution. + repeated Span spans = 7; +} + +// ExecutionMetrics is a collection of metrics that are collected during the execution of a Flyte task. +message ExecutionMetricResult { + // The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG. + string metric = 1; + + // The result data in prometheus range query result format + // https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats. + // This may include multiple time series, differentiated by their metric labels. + // Start time is greater of (execution attempt start, 48h ago) + // End time is lesser of (execution attempt end, now) + google.protobuf.Struct data = 2; +} \ No newline at end of file diff --git a/docs/api/flyteidl/flyteidl/core/security.proto b/docs/api/flyteidl/flyteidl/core/security.proto new file mode 100644 index 0000000000..3aba017476 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/security.proto @@ -0,0 +1,130 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +// Secret encapsulates information about the secret a task needs to proceed. An environment variable +// FLYTE_SECRETS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if +// secrets are passed through environment variables. 
+// FLYTE_SECRETS_DEFAULT_DIR will be passed to indicate the prefix of the path where secrets will be mounted if secrets +// are passed through file mounts. +message Secret { + enum MountType { + // Default case, indicates the client can tolerate either mounting options. + ANY = 0; + + // ENV_VAR indicates the secret needs to be mounted as an environment variable. + ENV_VAR = 1; + + // FILE indicates the secret needs to be mounted as a file. + FILE = 2; + } + + // The name of the secret group where to find the key referenced below. For K8s secrets, this should be the name of + // the v1/secret object. For Confidant, this should be the Credential name. For Vault, this should be the secret name. + // For AWS Secret Manager, this should be the name of the secret. + // +required + string group = 1; + + // The group version to fetch. This is not supported in all secret management systems. It'll be ignored for the ones + // that do not support it. + // +optional + string group_version = 2; + + // The name of the secret to mount. This has to match an existing secret in the system. It's up to the implementation + // of the secret management system to require case sensitivity. For K8s secrets, Confidant and Vault, this should + // match one of the keys inside the secret. For AWS Secret Manager, it's ignored. + // +optional + string key = 3; + + // mount_requirement is optional. Indicates where the secret has to be mounted. If provided, the execution will fail + // if the underlying key management system cannot satisfy that requirement. If not provided, the default location + // will depend on the key management system. + // +optional + MountType mount_requirement = 4; +} + +// OAuth2Client encapsulates OAuth2 Client Credentials to be used when making calls on behalf of that task. +message OAuth2Client { + // client_id is the public id for the client to use. 
The system will not perform any pre-auth validation that the + // secret requested matches the client_id indicated here. + // +required + string client_id = 1; + + // client_secret is a reference to the secret used to authenticate the OAuth2 client. + // +required + Secret client_secret = 2; +} + +// Identity encapsulates the various security identities a task can run as. It's up to the underlying plugin to pick the +// right identity for the execution environment. +message Identity { + // iam_role references the fully qualified name of Identity & Access Management role to impersonate. + string iam_role = 1; + + // k8s_service_account references a kubernetes service account to impersonate. + string k8s_service_account = 2; + + // oauth2_client references an oauth2 client. Backend plugins can use this information to impersonate the client when + // making external calls. + OAuth2Client oauth2_client = 3; + + // execution_identity references the subject who makes the execution + string execution_identity = 4; +} + +// OAuth2TokenRequest encapsulates information needed to request an OAuth2 token. +// FLYTE_TOKENS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if +// tokens are passed through environment variables. +// FLYTE_TOKENS_PATH_PREFIX will be passed to indicate the prefix of the path where secrets will be mounted if tokens +// are passed through file mounts. +message OAuth2TokenRequest { + // Type of the token requested. + enum Type { + // CLIENT_CREDENTIALS indicates a 2-legged OAuth token requested using client credentials. + CLIENT_CREDENTIALS = 0; + } + + // name indicates a unique id for the token request within this task token requests. It'll be used as a suffix for + // environment variables and as a filename for mounting tokens as files. + // +required + string name = 1; + + // type indicates the type of the request to make. Defaults to CLIENT_CREDENTIALS. 
+ // +required + Type type = 2; + + // client references the client_id/secret to use to request the OAuth2 token. + // +required + OAuth2Client client = 3; + + // idp_discovery_endpoint references the discovery endpoint used to retrieve token endpoint and other related + // information. + // +optional + string idp_discovery_endpoint = 4; + + // token_endpoint references the token issuance endpoint. If idp_discovery_endpoint is not provided, this parameter is + // mandatory. + // +optional + string token_endpoint = 5; +} + +// SecurityContext holds security attributes that apply to tasks. +message SecurityContext { + // run_as encapsulates the identity a pod should run as. If the task fills in multiple fields here, it'll be up to the + // backend plugin to choose the appropriate identity for the execution engine the task will run on. + Identity run_as = 1; + + // secrets indicate the list of secrets the task needs in order to proceed. Secrets will be mounted/passed to the + // pod as it starts. If the plugin responsible for kicking off the task will not run it on a flyte cluster (e.g. AWS + // Batch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access + // to the secret) and to pass it to the remote execution engine. + repeated Secret secrets = 2; + + // tokens indicate the list of token requests the task needs in order to proceed. Tokens will be mounted/passed to the + // pod as it starts. If the plugin responsible for kicking off the task will not run it on a flyte cluster (e.g. AWS + // Batch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access + // to the secret) and to pass it to the remote execution engine. 
+ repeated OAuth2TokenRequest tokens = 3; +} diff --git a/docs/api/flyteidl/flyteidl/core/tasks.proto b/docs/api/flyteidl/flyteidl/core/tasks.proto new file mode 100644 index 0000000000..20a1fa0cbf --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/tasks.proto @@ -0,0 +1,351 @@ +syntax = "proto3"; + +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/interface.proto"; +import "flyteidl/core/literals.proto"; +import "flyteidl/core/security.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/struct.proto"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +// A customizable interface to convey resources requested for a container. This can be interpreted differently for different +// container engines. +message Resources { + // Known resource names. + enum ResourceName { + UNKNOWN = 0; + CPU = 1; + GPU = 2; + MEMORY = 3; + STORAGE = 4; + // For Kubernetes-based deployments, pods use ephemeral local storage for scratch space, caching, and for logs. + EPHEMERAL_STORAGE = 5; + } + + // Encapsulates a resource name and value. + message ResourceEntry { + // Resource name. + ResourceName name = 1; + + // Value must be a valid k8s quantity. See + // https://github.com/kubernetes/apimachinery/blob/master/pkg/api/resource/quantity.go#L30-L80 + string value = 2; + } + + // The desired set of resources requested. ResourceNames must be unique within the list. + repeated ResourceEntry requests = 1; + + // Defines a set of bounds (e.g. min/max) within which the task can reliably run. ResourceNames must be unique + // within the list. + repeated ResourceEntry limits = 2; +} + +// Metadata associated with the GPU accelerator to allocate to a task. Contains +// information about device type, and for multi-instance GPUs, the partition size to +// use. 
+message GPUAccelerator { + // This can be any arbitrary string, and should be informed by the labels or taints + // associated with the nodes in question. Default cloud provider labels typically + // use the following values: `nvidia-tesla-t4`, `nvidia-tesla-a100`, etc. + string device = 1; + oneof partition_size_value { + bool unpartitioned = 2; + // Like `device`, this can be any arbitrary string, and should be informed by + // the labels or taints associated with the nodes in question. Default cloud + // provider labels typically use the following values: `1g.5gb`, `2g.10gb`, etc. + string partition_size = 3; + } +} + +// Encapsulates all non-standard resources, not captured by v1.ResourceRequirements, to +// allocate to a task. +message ExtendedResources { + // GPU accelerator to select for task. Contains information about device type, and + // for multi-instance GPUs, the partition size to use. + GPUAccelerator gpu_accelerator = 1; +} + +// Runtime information. This is loosely defined to allow for extensibility. +message RuntimeMetadata { + enum RuntimeType { + OTHER = 0; + FLYTE_SDK = 1; + } + + // Type of runtime. + RuntimeType type = 1; + + // Version of the runtime. All versions should be backward compatible. However, certain cases call for version + // checks to ensure tighter validation or setting expectations. + string version = 2; + + //+optional It can be used to provide extra information about the runtime (e.g. python, golang... etc.). + string flavor = 3; +} + +// Task Metadata +message TaskMetadata { + // Indicates whether the system should attempt to lookup this task's output to avoid duplication of work. + bool discoverable = 1; + + // Runtime information about the task. + RuntimeMetadata runtime = 2; + + // The overall timeout of a task including user-triggered retries. + google.protobuf.Duration timeout = 4; + + // Number of retries per task. 
+ RetryStrategy retries = 5; + + // Indicates a logical version to apply to this task for the purpose of discovery. + string discovery_version = 6; + + // If set, this indicates that this task is deprecated. This will enable owners of tasks to notify consumers + // of the ending of support for a given task. + string deprecated_error_message = 7; + + // For interruptible we will populate it at the node level but require it be part of TaskMetadata + // for a user to set the value. + // We are using oneof instead of bool because otherwise we would be unable to distinguish between value being + // set by the user or defaulting to false. + // The logic of handling precedence will be done as part of flytepropeller. + + // Identify whether task is interruptible + oneof interruptible_value { + bool interruptible = 8; + }; + + // Indicates whether the system should attempt to execute discoverable instances in serial to avoid duplicate work + bool cache_serializable = 9; + + // Indicates whether the task will generate a Deck URI when it finishes executing. + bool generates_deck = 10; + + // Arbitrary tags that allow users and the platform to store small but arbitrary labels + map tags = 11; + + // pod_template_name is the unique name of a PodTemplate k8s resource to be used as the base configuration if this + // task creates a k8s Pod. If this value is set, the specified PodTemplate will be used instead of, but applied + // identically as, the default PodTemplate configured in FlytePropeller. + string pod_template_name = 12; + + // cache_ignore_input_vars is the input variables that should not be included when calculating hash for cache. + repeated string cache_ignore_input_vars = 13; +} + +// A Task structure that uniquely identifies a task in the system +// Tasks are registered as a first step in the system. +message TaskTemplate { + // Auto generated taskId by the system. Task Id uniquely identifies this task globally. 
+ Identifier id = 1; + + // A predefined yet extensible Task type identifier. This can be used to customize any of the components. If no + // extensions are provided in the system, Flyte will resolve this task to its TaskCategory and default the + // implementation registered for the TaskCategory. + string type = 2; + + // Extra metadata about the task. + TaskMetadata metadata = 3; + + // A strongly typed interface for the task. This enables others to use this task within a workflow and guarantees + // compile-time validation of the workflow to avoid costly runtime failures. + TypedInterface interface = 4; + + // Custom data about the task. This is extensible to allow various plugins in the system. + google.protobuf.Struct custom = 5; + + // Known target types that the system will guarantee plugins for. Custom SDK plugins are allowed to set these if needed. + // If no corresponding execution-layer plugins are found, the system will default to handling these using built-in + // handlers. + oneof target { + Container container = 6; + K8sPod k8s_pod = 17; + Sql sql = 18; + } + + // This can be used to customize task handling at execution time for the same task type. + int32 task_type_version = 7; + + // security_context encapsulates security attributes requested to run this task. + SecurityContext security_context = 8; + + // Encapsulates all non-standard resources, not captured by + // v1.ResourceRequirements, to allocate to a task. + ExtendedResources extended_resources = 9; + + // Metadata about the custom defined for this task. This is extensible to allow various plugins in the system + // to use as required. + // reserve the field numbers 1 through 15 for very frequently occurring message elements + map<string, string> config = 16; +} + +// ----------------- First class Plugins + +// Defines port properties for a container. +message ContainerPort { + // Number of port to expose on the pod's IP address. + // This must be a valid port number, 0 < x < 65536. 
+ uint32 container_port = 1; +} + +message Container { + // Container image url. Eg: docker/redis:latest + string image = 1; + + // Command to be executed, if not provided, the default entrypoint in the container image will be used. + repeated string command = 2; + + // These will default to Flyte given paths. If provided, the system will not append known paths. If the task still + // needs flyte's inputs and outputs path, add $(FLYTE_INPUT_FILE), $(FLYTE_OUTPUT_FILE) wherever makes sense and the + // system will populate these before executing the container. + repeated string args = 3; + + // Container resources requirement as specified by the container engine. + Resources resources = 4; + + // Environment variables will be set as the container is starting up. + repeated KeyValuePair env = 5; + + // Allows extra configs to be available for the container. + // TODO: elaborate on how configs will become available. + // Deprecated, please use TaskTemplate.config instead. + repeated KeyValuePair config = 6 [deprecated = true]; + + // Ports to open in the container. This feature is not supported by all execution engines. (e.g. supported on K8s but + // not supported on AWS Batch) + // Only K8s + repeated ContainerPort ports = 7; + + // BETA: Optional configuration for DataLoading. If not specified, then default values are used. + // This makes it possible to to run a completely portable container, that uses inputs and outputs + // only from the local file-system and without having any reference to flyteidl. This is supported only on K8s at the moment. + // If data loading is enabled, then data will be mounted in accompanying directories specified in the DataLoadingConfig. If the directories + // are not specified, inputs will be mounted onto and outputs will be uploaded from a pre-determined file-system path. Refer to the documentation + // to understand the default paths. 
+ // Only K8s + DataLoadingConfig data_config = 9; + + // Architecture-type the container image supports. + enum Architecture { + UNKNOWN = 0; + AMD64 = 1; + ARM64 = 2; + ARM_V6 = 3; + ARM_V7 = 4; + } + Architecture architecture = 10; +} + +// Strategy to use when dealing with Blob, Schema, or multipart blob data (large datasets) +message IOStrategy { + // Mode to use for downloading + enum DownloadMode { + // All data will be downloaded before the main container is executed + DOWNLOAD_EAGER = 0; + // Data will be downloaded as a stream and an End-Of-Stream marker will be written to indicate all data has been downloaded. Refer to protocol for details + DOWNLOAD_STREAM = 1; + // Large objects (offloaded) will not be downloaded + DO_NOT_DOWNLOAD = 2; + } + // Mode to use for uploading + enum UploadMode { + // All data will be uploaded after the main container exits + UPLOAD_ON_EXIT = 0; + // Data will be uploaded as it appears. Refer to protocol specification for details + UPLOAD_EAGER = 1; + // Data will not be uploaded, only references will be written + DO_NOT_UPLOAD = 2; + } + // Mode to use to manage downloads + DownloadMode download_mode = 1; + // Mode to use to manage uploads + UploadMode upload_mode = 2; +} + +// This configuration allows executing raw containers in Flyte using the Flyte CoPilot system. +// Flyte CoPilot, eliminates the needs of flytekit or sdk inside the container. Any inputs required by the users container are side-loaded in the input_path +// Any outputs generated by the user container - within output_path are automatically uploaded. +message DataLoadingConfig { + // LiteralMapFormat decides the encoding format in which the input metadata should be made available to the containers. + // If the user has access to the protocol buffer definitions, it is recommended to use the PROTO format. 
+ // JSON and YAML do not need any protobuf definitions to read it + // All remote references in core.LiteralMap are replaced with local filesystem references (the data is downloaded to local filesystem) + enum LiteralMapFormat { + // JSON / YAML for the metadata (which contains inlined primitive values). The representation is inline with the standard json specification as specified - https://www.json.org/json-en.html + JSON = 0; + YAML = 1; + // Proto is a serialized binary of `core.LiteralMap` defined in flyteidl/core + PROTO = 2; + } + // Flag enables DataLoading Config. If this is not set, data loading will not be used! + bool enabled = 1; + // File system path (start at root). This folder will contain all the inputs exploded to a separate file. + // Example, if the input interface needs (x: int, y: blob, z: multipart_blob) and the input path is '/var/flyte/inputs', then the file system will look like + // /var/flyte/inputs/inputs. .pb .json .yaml> -> Format as defined previously. The Blob and Multipart blob will reference local filesystem instead of remote locations + // /var/flyte/inputs/x -> X is a file that contains the value of x (integer) in string format + // /var/flyte/inputs/y -> Y is a file in Binary format + // /var/flyte/inputs/z/... -> Note Z itself is a directory + // More information about the protocol - refer to docs #TODO reference docs here + string input_path = 2; + // File system path (start at root). This folder should contain all the outputs for the task as individual files and/or an error text file + string output_path = 3; + // In the inputs folder, there will be an additional summary/metadata file that contains references to all files or inlined primitive values. + // This format decides the actual encoding for the data. 
Refer to the encoding to understand the specifics of the contents and the encoding + LiteralMapFormat format = 4; + IOStrategy io_strategy = 5; +} + +// Defines a pod spec and additional pod metadata that is created when a task is executed. +message K8sPod { + // Contains additional metadata for building a kubernetes pod. + K8sObjectMetadata metadata = 1; + + // Defines the primary pod spec created when a task is executed. + // This should be a JSON-marshalled pod spec, which can be defined in + // - go, using: https://github.com/kubernetes/api/blob/release-1.21/core/v1/types.go#L2936 + // - python: using https://github.com/kubernetes-client/python/blob/release-19.0/kubernetes/client/models/v1_pod_spec.py + google.protobuf.Struct pod_spec = 2; + + // BETA: Optional configuration for DataLoading. If not specified, then default values are used. + // This makes it possible to to run a completely portable container, that uses inputs and outputs + // only from the local file-system and without having any reference to flytekit. This is supported only on K8s at the moment. + // If data loading is enabled, then data will be mounted in accompanying directories specified in the DataLoadingConfig. If the directories + // are not specified, inputs will be mounted onto and outputs will be uploaded from a pre-determined file-system path. Refer to the documentation + // to understand the default paths. + // Only K8s + DataLoadingConfig data_config = 3; +} + +// Metadata for building a kubernetes object when a task is executed. +message K8sObjectMetadata { + // Optional labels to add to the pod definition. + map labels = 1; + + // Optional annotations to add to the pod definition. + map annotations = 2; +} + +// Sql represents a generic sql workload with a statement and dialect. +message Sql { + // The actual query to run, the query can have templated parameters. + // We use Flyte's Golang templating format for Query templating. 
+ // For example, + // insert overwrite directory '{{ .rawOutputDataPrefix }}' stored as parquet + // select * + // from my_table + // where ds = '{{ .Inputs.ds }}' + string statement = 1; + // The dialect of the SQL statement. This is used to validate and parse SQL statements at compilation time to avoid + // expensive runtime operations. If set to an unsupported dialect, no validation will be done on the statement. + // We support the following dialect: ansi, hive. + enum Dialect { + UNDEFINED = 0; + ANSI = 1; + HIVE = 2; + OTHER = 3; + } + Dialect dialect = 2; +} diff --git a/docs/api/flyteidl/flyteidl/core/types.proto b/docs/api/flyteidl/flyteidl/core/types.proto new file mode 100644 index 0000000000..3580eea9f0 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/types.proto @@ -0,0 +1,208 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +import "google/protobuf/struct.proto"; + +// Define a set of simple types. +enum SimpleType { + NONE = 0; + INTEGER = 1; + FLOAT = 2; + STRING = 3; + BOOLEAN = 4; + DATETIME = 5; + DURATION = 6; + BINARY = 7; + ERROR = 8; + STRUCT = 9; +} + +// Defines schema columns and types to strongly type-validate schemas interoperability. +message SchemaType { + message SchemaColumn { + // A unique name -within the schema type- for the column + string name = 1; + + enum SchemaColumnType { + INTEGER = 0; + FLOAT = 1; + STRING = 2; + BOOLEAN = 3; + DATETIME = 4; + DURATION = 5; + } + + // The column type. This allows a limited set of types currently. + SchemaColumnType type = 2; + } + + // A list of ordered columns this schema comprises of. + repeated SchemaColumn columns = 3; +} + +message StructuredDatasetType { + message DatasetColumn { + // A unique name within the schema type for the column. + string name = 1; + + // The column type. + LiteralType literal_type = 2; + } + + // A list of ordered columns this schema comprises of. 
+ repeated DatasetColumn columns = 1; + + // This is the storage format, the format of the bits at rest + // parquet, feather, csv, etc. + // For two types to be compatible, the format will need to be an exact match. + string format = 2; + + // This is a string representing the type that the bytes in external_schema_bytes are formatted in. + // This is an optional field that will not be used for type checking. + string external_schema_type = 3; + + // The serialized bytes of a third-party schema library like Arrow. + // This is an optional field that will not be used for type checking. + bytes external_schema_bytes = 4; +} + +// Defines type behavior for blob objects +message BlobType { + enum BlobDimensionality { + SINGLE = 0; + MULTIPART = 1; + } + + // Format can be a free form string understood by SDK/UI etc like + // csv, parquet etc + string format = 1; + BlobDimensionality dimensionality = 2; +} + +// Enables declaring enum types, with predefined string values +// For len(values) > 0, the first value in the ordered list is regarded as the default value. If you wish +// To provide no defaults, make the first value as undefined. +message EnumType { + // Predefined set of enum values. + repeated string values = 1; +} + +// Defines a tagged union type, also known as a variant (and formally as the sum type). +// +// A sum type S is defined by a sequence of types (A, B, C, ...), each tagged by a string tag +// A value of type S is constructed from a value of any of the variant types. The specific choice of type is recorded by +// storing the variant's tag with the literal value and can be examined in runtime. +// +// Type S is typically written as +// S := Apple A | Banana B | Cantaloupe C | ... 
+// +// Notably, a nullable (optional) type is a sum type between some type X and the singleton type representing a null-value: +// Optional X := X | Null +// +// See also: https://en.wikipedia.org/wiki/Tagged_union +message UnionType { + // Predefined set of variants in union. + repeated LiteralType variants = 1; +} + +// Hints to improve type matching +// e.g. allows distinguishing output from custom type transformers +// even if the underlying IDL serialization matches. +message TypeStructure { + // Must exactly match for types to be castable + string tag = 1; + // dataclass_type only exists for dataclasses. + // This is used to resolve the type of the fields of dataclass + // The key is the field name, and the value is the literal type of the field + // e.g. For dataclass Foo, with fields a, and a is a string + // Foo.a will be resolved as a literal type of string from dataclass_type + map dataclass_type = 2; +} + +// TypeAnnotation encapsulates registration time information about a type. This can be used for various control-plane operations. TypeAnnotation will not be available at runtime when a task runs. +message TypeAnnotation { + // A arbitrary JSON payload to describe a type. + google.protobuf.Struct annotations = 1; +} + +// Defines a strong type to allow type checking between interfaces. +message LiteralType { + oneof type { + // A simple type that can be compared one-to-one with another. + SimpleType simple = 1; + + // A complex type that requires matching of inner fields. + SchemaType schema = 2; + + // Defines the type of the value of a collection. Only homogeneous collections are allowed. + LiteralType collection_type = 3; + + // Defines the type of the value of a map type. The type of the key is always a string. + LiteralType map_value_type = 4; + + // A blob might have specialized implementation details depending on associated metadata. + BlobType blob = 5; + + // Defines an enum with pre-defined string values. 
+ EnumType enum_type = 7; + + // Generalized schema support + StructuredDatasetType structured_dataset_type = 8; + + // Defines an union type with pre-defined LiteralTypes. + UnionType union_type = 10; + } + + // This field contains type metadata that is descriptive of the type, but is NOT considered in type-checking. This might be used by + // consumers to identify special behavior or display extended information for the type. + google.protobuf.Struct metadata = 6; + + // This field contains arbitrary data that might have special semantic + // meaning for the client but does not effect internal flyte behavior. + TypeAnnotation annotation = 9; + + // Hints to improve type matching. + TypeStructure structure = 11; +} + +// A reference to an output produced by a node. The type can be retrieved -and validated- from +// the underlying interface of the node. +message OutputReference { + // Node id must exist at the graph layer. + string node_id = 1; + + // Variable name must refer to an output variable for the node. + string var = 2; + + repeated PromiseAttribute attr_path = 3; +} + +// PromiseAttribute stores the attribute path of a promise, which will be resolved at runtime. +// The attribute path is a list of strings and integers. +// In the following example, +// ``` +// @workflow +// def wf(): +// o = t1() +// t2(o.a["b"][0]) +// ``` +// the output reference t2 binds to has a list of PromiseAttribute ["a", "b", 0] + +message PromiseAttribute { + oneof value { + string string_value = 1; + int32 int_value = 2; + } +} + +// Represents an error thrown from a node. +message Error { + // The node id that threw the error. + string failed_node_id = 1; + + // Error message thrown. 
+ string message = 2; +} diff --git a/docs/api/flyteidl/flyteidl/core/workflow.proto b/docs/api/flyteidl/flyteidl/core/workflow.proto new file mode 100644 index 0000000000..3df4b2422f --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/workflow.proto @@ -0,0 +1,331 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +import "flyteidl/core/condition.proto"; +import "flyteidl/core/execution.proto"; +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/interface.proto"; +import "flyteidl/core/literals.proto"; +import "flyteidl/core/tasks.proto"; +import "flyteidl/core/types.proto"; +import "flyteidl/core/security.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/wrappers.proto"; + +// Defines a condition and the execution unit that should be executed if the condition is satisfied. +message IfBlock { + core.BooleanExpression condition = 1; + Node then_node = 2; +} + +// Defines a series of if/else blocks. The first branch whose condition evaluates to true is the one to execute. +// If no conditions were satisfied, the else_node or the error will execute. +message IfElseBlock { + //+required. First condition to evaluate. + IfBlock case = 1; + + //+optional. Additional branches to evaluate. + repeated IfBlock other = 2; + + //+required. + oneof default { + // The node to execute in case none of the branches were taken. + Node else_node = 3; + + // An error to throw in case none of the branches were taken. + Error error = 4; + } +} + +// BranchNode is a special node that alter the flow of the workflow graph. It allows the control flow to branch at +// runtime based on a series of conditions that get evaluated on various parameters (e.g. inputs, primitives). +message BranchNode { + //+required + IfElseBlock if_else = 1; +} + +// Refers to the task that the Node is to execute. 
+message TaskNode { + oneof reference { + // A globally unique identifier for the task. + Identifier reference_id = 1; + } + + // Optional overrides applied at task execution time. + TaskNodeOverrides overrides = 2; +} + +// Refers to the workflow the node is to execute. +message WorkflowNode { + oneof reference { + // A globally unique identifier for the launch plan. + Identifier launchplan_ref = 1; + + // Reference to a subworkflow, that should be defined with the compiler context + Identifier sub_workflow_ref = 2; + } +} + +// ApproveCondition represents a dependency on an external approval. During execution, this will manifest as a boolean +// signal with the provided signal_id. +message ApproveCondition { + // A unique identifier for the requested boolean signal. + string signal_id = 1; +} + +// SignalCondition represents a dependency on a signal. +message SignalCondition { + // A unique identifier for the requested signal. + string signal_id = 1; + + // A type denoting the required value type for this signal. + LiteralType type = 2; + + // The variable name for the signal value in this node's outputs. + string output_variable_name = 3; +} + +// SleepCondition represents a dependency on waiting for the specified duration. +message SleepCondition { + // The overall duration for this sleep. + google.protobuf.Duration duration = 1; +} + +// GateNode refers to the condition that is required for the gate to successfully complete. +message GateNode { + oneof condition { + // ApproveCondition represents a dependency on an external approval provided by a boolean signal. + ApproveCondition approve = 1; + + // SignalCondition represents a dependency on a signal. + SignalCondition signal = 2; + + // SleepCondition represents a dependency on waiting for the specified duration. + SleepCondition sleep = 3; + } +} + +// ArrayNode is a Flyte node type that simplifies the execution of a sub-node over a list of input +// values. 
An ArrayNode can be executed with configurable parallelism (separate from the parent +// workflow) and can be configured to succeed when a certain number of sub-nodes succeed. +message ArrayNode { + // node is the sub-node that will be executed for each element in the array. + Node node = 1; + + oneof parallelism_option { + // parallelism defines the minimum number of instances to bring up concurrently at any given + // point. Note that this is an optimistic restriction and that, due to network partitioning or + // other failures, the actual number of currently running instances might be more. This has to + // be a positive number if assigned. Default value is size. + uint32 parallelism = 2; + } + + oneof success_criteria { + // min_successes is an absolute number of the minimum number of successful completions of + // sub-nodes. As soon as this criteria is met, the ArrayNode will be marked as successful + // and outputs will be computed. This has to be a non-negative number if assigned. Default + // value is size (if specified). + uint32 min_successes = 3; + + // If the array job size is not known beforehand, the min_success_ratio can instead be used + // to determine when an ArrayNode can be marked successful. + float min_success_ratio = 4; + } + + enum ExecutionMode { + // Indicates the ArrayNode will store minimal state for the sub-nodes. + // This is more efficient, but only supports a subset of Flyte entities. + MINIMAL_STATE = 0; + + // Indicates the ArrayNode will store full state for the sub-nodes. + // This supports a wider range of Flyte entities. + FULL_STATE = 1; + } + + // execution_mode determines the execution path for ArrayNode. + ExecutionMode execution_mode = 5; +} + +// Defines extra information about the Node. +message NodeMetadata { + // A friendly name for the Node + string name = 1; + + // The overall timeout of a task. + google.protobuf.Duration timeout = 4; + + // Number of retries per task. 
+ RetryStrategy retries = 5; + + // Identify whether node is interruptible + oneof interruptible_value { + bool interruptible = 6; + }; + + // Identify whether a node should have it's outputs cached. + oneof cacheable_value { + bool cacheable = 7; + } + + // The version of the cache to use. + oneof cache_version_value { + string cache_version = 8; + } + + // Identify whether caching operations involving this node should be serialized. + oneof cache_serializable_value { + bool cache_serializable = 9; + } +} + +// Links a variable to an alias. +message Alias { + // Must match one of the output variable names on a node. + string var = 1; + + // A workflow-level unique alias that downstream nodes can refer to in their input. + string alias = 2; +} + +// A Workflow graph Node. One unit of execution in the graph. Each node can be linked to a Task, a Workflow or a branch +// node. +message Node { + // A workflow-level unique identifier that identifies this node in the workflow. 'inputs' and 'outputs' are reserved + // node ids that cannot be used by other nodes. + string id = 1; + + // Extra metadata about the node. + NodeMetadata metadata = 2; + + // Specifies how to bind the underlying interface's inputs. All required inputs specified in the underlying interface + // must be fulfilled. + repeated Binding inputs = 3; + + //+optional Specifies execution dependency for this node ensuring it will only get scheduled to run after all its + // upstream nodes have completed. This node will have an implicit dependency on any node that appears in inputs + // field. + repeated string upstream_node_ids = 4; + + //+optional. A node can define aliases for a subset of its outputs. This is particularly useful if different nodes + // need to conform to the same interface (e.g. all branches in a branch node). Downstream nodes must refer to this + // nodes outputs using the alias if one's specified. 
+ repeated Alias output_aliases = 5; + + // Information about the target to execute in this node. + oneof target { + // Information about the Task to execute in this node. + TaskNode task_node = 6; + + // Information about the Workflow to execute in this mode. + WorkflowNode workflow_node = 7; + + // Information about the branch node to evaluate in this node. + BranchNode branch_node = 8; + + // Information about the condition to evaluate in this node. + GateNode gate_node = 9; + + // Information about the sub-node executions for each value in the list of this nodes + // inputs values. + ArrayNode array_node = 10; + } +} + +// This is workflow layer metadata. These settings are only applicable to the workflow as a whole, and do not +// percolate down to child entities (like tasks) launched by the workflow. +message WorkflowMetadata { + // Indicates the runtime priority of workflow executions. + QualityOfService quality_of_service = 1; + + // Failure Handling Strategy + enum OnFailurePolicy { + // FAIL_IMMEDIATELY instructs the system to fail as soon as a node fails in the workflow. It'll automatically + // abort all currently running nodes and clean up resources before finally marking the workflow executions as + // failed. + FAIL_IMMEDIATELY = 0; + + // FAIL_AFTER_EXECUTABLE_NODES_COMPLETE instructs the system to make as much progress as it can. The system will + // not alter the dependencies of the execution graph so any node that depend on the failed node will not be run. + // Other nodes that will be executed to completion before cleaning up resources and marking the workflow + // execution as failed. + FAIL_AFTER_EXECUTABLE_NODES_COMPLETE = 1; + } + + // Defines how the system should behave when a failure is detected in the workflow execution. 
+ OnFailurePolicy on_failure = 2; + + // Arbitrary tags that allow users and the platform to store small but arbitrary labels + map tags = 3; +} + +// The difference between these settings and the WorkflowMetadata ones is that these are meant to be passed down to +// a workflow's underlying entities (like tasks). For instance, 'interruptible' has no meaning at the workflow layer, it +// is only relevant when a task executes. The settings here are the defaults that are passed to all nodes +// unless explicitly overridden at the node layer. +// If you are adding a setting that applies to both the Workflow itself, and everything underneath it, it should be +// added to both this object and the WorkflowMetadata object above. +message WorkflowMetadataDefaults { + // Whether child nodes of the workflow are interruptible. + bool interruptible = 1; +} + +// Flyte Workflow Structure that encapsulates task, branch and subworkflow nodes to form a statically analyzable, +// directed acyclic graph. +message WorkflowTemplate { + // A globally unique identifier for the workflow. + Identifier id = 1; + + // Extra metadata about the workflow. + WorkflowMetadata metadata = 2; + + // Defines a strongly typed interface for the Workflow. This can include some optional parameters. + TypedInterface interface = 3; + + // A list of nodes. In addition, 'globals' is a special reserved node id that can be used to consume workflow inputs. + repeated Node nodes = 4; + + // A list of output bindings that specify how to construct workflow outputs. Bindings can pull node outputs or + // specify literals. All workflow outputs specified in the interface field must be bound in order for the workflow + // to be validated. A workflow has an implicit dependency on all of its nodes to execute successfully in order to + // bind final outputs. + // Most of these outputs will be Binding's with a BindingData of type OutputReference. 
That is, your workflow can + // just have an output of some constant (`Output(5)`), but usually, the workflow will be pulling + // outputs from the output of a task. + repeated Binding outputs = 5; + + //+optional A catch-all node. This node is executed whenever the execution engine determines the workflow has failed. + // The interface of this node must match the Workflow interface with an additional input named 'error' of type + // pb.lyft.flyte.core.Error. + Node failure_node = 6; + + // workflow defaults + WorkflowMetadataDefaults metadata_defaults = 7; +} + +// Optional task node overrides that will be applied at task execution time. +message TaskNodeOverrides { + // A customizable interface to convey resources requested for a task container. + Resources resources = 1; + + // Overrides for all non-standard resources, not captured by + // v1.ResourceRequirements, to allocate to a task. + ExtendedResources extended_resources = 2; + + // Override for the image used by task pods. + string container_image = 3; +} + +// A structure that uniquely identifies a launch plan in the system. +message LaunchPlanTemplate { + // A globally unique identifier for the launch plan. + Identifier id = 1; + + // The input and output interface for the launch plan + TypedInterface interface = 2; + + // A collection of input literals that are fixed for the launch plan + LiteralMap fixed_inputs = 3; +} diff --git a/docs/api/flyteidl/flyteidl/core/workflow_closure.proto b/docs/api/flyteidl/flyteidl/core/workflow_closure.proto new file mode 100644 index 0000000000..c8ee990036 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/core/workflow_closure.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package flyteidl.core; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; + +import "flyteidl/core/workflow.proto"; +import "flyteidl/core/tasks.proto"; + +// Defines an enclosed package of workflow and tasks it references. +message WorkflowClosure { + //required. 
Workflow template. + WorkflowTemplate workflow = 1; + + //optional. A collection of tasks referenced by the workflow. Only needed if the workflow + // references tasks. + repeated TaskTemplate tasks = 2; +} diff --git a/docs/api/flyteidl/flyteidl/datacatalog/datacatalog.proto b/docs/api/flyteidl/flyteidl/datacatalog/datacatalog.proto new file mode 100644 index 0000000000..e296603113 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/datacatalog/datacatalog.proto @@ -0,0 +1,420 @@ +syntax = "proto3"; + +package datacatalog; + +import "flyteidl/core/literals.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/datacatalog"; + +/* + * Data Catalog service definition + * Data Catalog is a service for indexing parameterized, strongly-typed data artifacts across revisions. + * Artifacts are associated with a Dataset, and can be tagged for retrieval. + */ +service DataCatalog { + // Create a new Dataset. Datasets are unique based on the DatasetID. Datasets are logical groupings of artifacts. + // Each dataset can have one or more artifacts + rpc CreateDataset (CreateDatasetRequest) returns (CreateDatasetResponse); + + // Get a Dataset by the DatasetID. This returns the Dataset with the associated metadata. + rpc GetDataset (GetDatasetRequest) returns (GetDatasetResponse); + + // Create an artifact and the artifact data associated with it. An artifact can be a hive partition or arbitrary + // files or data values + rpc CreateArtifact (CreateArtifactRequest) returns (CreateArtifactResponse); + + // Retrieve an artifact by an identifying handle. This returns an artifact along with the artifact data. + rpc GetArtifact (GetArtifactRequest) returns (GetArtifactResponse); + + // Associate a tag with an artifact. Tags are unique within a Dataset. 
+ rpc AddTag (AddTagRequest) returns (AddTagResponse); + + // Return a paginated list of artifacts + rpc ListArtifacts (ListArtifactsRequest) returns (ListArtifactsResponse); + + // Return a paginated list of datasets + rpc ListDatasets (ListDatasetsRequest) returns (ListDatasetsResponse); + + // Updates an existing artifact, overwriting the stored artifact data in the underlying blob storage. + rpc UpdateArtifact (UpdateArtifactRequest) returns (UpdateArtifactResponse); + + // Attempts to get or extend a reservation for the corresponding artifact. If one already exists + // (ie. another entity owns the reservation) then that reservation is retrieved. + // Once you acquire a reservation, you need to periodically extend the reservation with an + // identical call. If the reservation is not extended before the defined expiration, it may be + // acquired by another task. + // Note: We may have multiple concurrent tasks with the same signature and the same input that + // try to populate the same artifact at the same time. Thus with reservation, only one task can + // run at a time, until the reservation expires. + // Note: If task A does not extend the reservation in time and the reservation expires, another + // task B may take over the reservation, resulting in two tasks A and B running in parallel. So + // a third task C may get the Artifact from A or B, whichever writes last. + rpc GetOrExtendReservation (GetOrExtendReservationRequest) returns (GetOrExtendReservationResponse); + + // Release the reservation when the task holding the spot fails so that the other tasks + // can grab the spot. + rpc ReleaseReservation (ReleaseReservationRequest) returns (ReleaseReservationResponse); +} + +/* + * Request message for creating a Dataset. + */ +message CreateDatasetRequest { + Dataset dataset = 1; +} + +/* + * Response message for creating a Dataset + */ +message CreateDatasetResponse { + +} + +/* + * Request message for retrieving a Dataset. 
The Dataset is retrieved by its unique identifier + * which is a combination of several fields. + */ +message GetDatasetRequest { + DatasetID dataset = 1; +} + +/* + * Response message for retrieving a Dataset. The response will include the metadata for the + * Dataset. + */ +message GetDatasetResponse { + Dataset dataset = 1; +} + +/* + * Request message for retrieving an Artifact. Retrieve an artifact based on a query handle that + * can be one of artifact_id or tag. The result returned will include the artifact data and metadata + * associated with the artifact. + */ +message GetArtifactRequest { + DatasetID dataset = 1; + + oneof query_handle { + string artifact_id = 2; + string tag_name = 3; + } +} + +/* + * Response message for retrieving an Artifact. The result returned will include the artifact data + * and metadata associated with the artifact. + */ +message GetArtifactResponse { + Artifact artifact = 1; +} + +/* + * Request message for creating an Artifact and its associated artifact Data. + */ +message CreateArtifactRequest { + Artifact artifact = 1; +} + +/* + * Response message for creating an Artifact. + */ +message CreateArtifactResponse { + +} + +/* + * Request message for tagging an Artifact. + */ +message AddTagRequest { + Tag tag = 1; +} + +/* + * Response message for tagging an Artifact. + */ +message AddTagResponse { + +} + +// List the artifacts that belong to the Dataset, optionally filtered using a filter expression. 
+message ListArtifactsRequest { + // Use a datasetID for which you want to retrieve the artifacts + DatasetID dataset = 1; + + // Apply the filter expression to this query + FilterExpression filter = 2; + // Pagination options to get a page of artifacts + PaginationOptions pagination = 3; +} + +// Response to list artifacts +message ListArtifactsResponse { + // The list of artifacts + repeated Artifact artifacts = 1; + // Token to use to request the next page, pass this into the next requests PaginationOptions + string next_token = 2; +} + +// List the datasets for the given query +message ListDatasetsRequest { + // Apply the filter expression to this query + FilterExpression filter = 1; + // Pagination options to get a page of datasets + PaginationOptions pagination = 2; +} + +// List the datasets response with token for next pagination +message ListDatasetsResponse { + // The list of datasets + repeated Dataset datasets = 1; + // Token to use to request the next page, pass this into the next requests PaginationOptions + string next_token = 2; +} + +/* + * Request message for updating an Artifact and overwriting its associated ArtifactData. + */ +message UpdateArtifactRequest { + // ID of dataset the artifact is associated with + DatasetID dataset = 1; + + // Either ID of artifact or name of tag to retrieve existing artifact from + oneof query_handle { + string artifact_id = 2; + string tag_name = 3; + } + + // List of data to overwrite stored artifact data with. Must contain ALL data for updated Artifact as any missing + // ArtifactData entries will be removed from the underlying blob storage and database. + repeated ArtifactData data = 4; + + // Update execution metadata(including execution domain, name, node, project data) when overwriting cache + Metadata metadata = 5; +} + +/* + * Response message for updating an Artifact. 
+ */ +message UpdateArtifactResponse { + // The unique ID of the artifact updated + string artifact_id = 1; +} + +/* + * ReservationID message that is composed of several string fields. + */ +message ReservationID { + // The unique ID for the reserved dataset + DatasetID dataset_id = 1; + + // The specific artifact tag for the reservation + string tag_name = 2; +} + +// Try to acquire or extend an artifact reservation. If an active reservation exists, retrieve that instance. +message GetOrExtendReservationRequest { + // The unique ID for the reservation + ReservationID reservation_id = 1; + + // The unique ID of the owner for the reservation + string owner_id = 2; + + // Requested reservation extension heartbeat interval + google.protobuf.Duration heartbeat_interval = 3; +} + +// A reservation including owner, heartbeat interval, expiration timestamp, and various metadata. +message Reservation { + // The unique ID for the reservation + ReservationID reservation_id = 1; + + // The unique ID of the owner for the reservation + string owner_id = 2; + + // Recommended heartbeat interval to extend reservation + google.protobuf.Duration heartbeat_interval = 3; + + // Expiration timestamp of this reservation + google.protobuf.Timestamp expires_at = 4; + + // Free-form metadata associated with the artifact + Metadata metadata = 6; +} + +// Response including either a newly minted reservation or the existing reservation +message GetOrExtendReservationResponse { + // The reservation to be acquired or extended + Reservation reservation = 1; +} + +// Request to release reservation +message ReleaseReservationRequest { + // The unique ID for the reservation + ReservationID reservation_id = 1; + + // The unique ID of the owner for the reservation + string owner_id = 2; +} + +// Response to release reservation +message ReleaseReservationResponse { + +} + +/* + * Dataset message. It is uniquely identified by DatasetID. 
+ */ +message Dataset { + DatasetID id = 1; + Metadata metadata = 2; + repeated string partitionKeys = 3; +} + +/* + * An artifact could have multiple partitions and each partition can have an arbitrary string key/value pair + */ +message Partition { + string key = 1; + string value = 2; +} + +/* + * DatasetID message that is composed of several string fields. + */ +message DatasetID { + string project = 1; // The name of the project + string name = 2; // The name of the dataset + string domain = 3; // The domain (eg. environment) + string version = 4; // Version of the data schema + string UUID = 5; // UUID for the dataset (if set the above fields are optional) + + // Optional, org key applied to the resource. + string org = 6; +} + +/* + * Artifact message. It is composed of several string fields. + */ +message Artifact { + string id = 1; // The unique ID of the artifact + DatasetID dataset = 2; // The Dataset that the artifact belongs to + repeated ArtifactData data = 3; // A list of data that is associated with the artifact + Metadata metadata = 4; // Free-form metadata associated with the artifact + repeated Partition partitions = 5; + repeated Tag tags = 6; + google.protobuf.Timestamp created_at = 7; // creation timestamp of artifact, autogenerated by service +} + +/* + * ArtifactData that belongs to an artifact + */ +message ArtifactData { + string name = 1; + flyteidl.core.Literal value = 2; +} + +/* + * Tag message that is unique to a Dataset. It is associated to a single artifact and + * can be retrieved by name later. 
+ */
+message Tag {
+  string name = 1; // Name of tag
+  string artifact_id = 2; // The tagged artifact
+  DatasetID dataset = 3; // The Dataset that this tag belongs to
+}
+
+/*
+ * Metadata representation for artifacts and datasets
+ */
+message Metadata {
+  map<string, string> key_map = 1; // key map is a dictionary of key/val strings that represent metadata
+}
+
+// Filter expression that is composed of a combination of single filters
+message FilterExpression {
+  repeated SinglePropertyFilter filters = 1;
+}
+
+// A single property to filter on.
+message SinglePropertyFilter {
+  oneof property_filter {
+    TagPropertyFilter tag_filter = 1;
+    PartitionPropertyFilter partition_filter = 2;
+    ArtifactPropertyFilter artifact_filter = 3;
+    DatasetPropertyFilter dataset_filter = 4;
+  }
+
+  // as use-cases come up we can add more operators, ex: gte, like, not eq etc.
+  enum ComparisonOperator {
+    EQUALS = 0;
+  }
+
+  ComparisonOperator operator = 10; // field 10 in case we add more entities to query
+  // Next field number: 11
+}
+
+// Artifact properties we can filter by
+message ArtifactPropertyFilter {
+  // oneof because we can add more properties in the future
+  oneof property {
+    string artifact_id = 1;
+  }
+}
+
+// Tag properties we can filter by
+message TagPropertyFilter {
+  oneof property {
+    string tag_name = 1;
+  }
+}
+
+// Partition properties we can filter by
+message PartitionPropertyFilter {
+  oneof property {
+    KeyValuePair key_val = 1;
+  }
+}
+
+message KeyValuePair {
+  string key = 1;
+  string value = 2;
+}
+
+// Dataset properties we can filter by
+message DatasetPropertyFilter {
+  oneof property {
+    string project = 1;
+    string name = 2;
+    string domain = 3;
+    string version = 4;
+    // Optional, org key applied to the dataset.
+ string org = 5; + } +} + +// Pagination options for making list requests +message PaginationOptions { + + // the max number of results to return + uint32 limit = 1; + + // the token to pass to fetch the next page + string token = 2; + + // the property that we want to sort the results by + SortKey sortKey = 3; + + // the sort order of the results + SortOrder sortOrder = 4; + + enum SortOrder { + DESCENDING = 0; + ASCENDING = 1; + } + + enum SortKey { + CREATION_TIME = 0; + } +} diff --git a/docs/api/flyteidl/flyteidl/event/cloudevents.proto b/docs/api/flyteidl/flyteidl/event/cloudevents.proto new file mode 100644 index 0000000000..d02c5ff516 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/event/cloudevents.proto @@ -0,0 +1,73 @@ +syntax = "proto3"; + +package flyteidl.event; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event"; + +import "flyteidl/event/event.proto"; +import "flyteidl/core/literals.proto"; +import "flyteidl/core/interface.proto"; +import "flyteidl/core/artifact_id.proto"; +import "flyteidl/core/identifier.proto"; +import "google/protobuf/timestamp.proto"; + +// This is the cloud event parallel to the raw WorkflowExecutionEvent message. It's filled in with additional +// information that downstream consumers may find useful. +message CloudEventWorkflowExecution { + event.WorkflowExecutionEvent raw_event = 1; + + core.TypedInterface output_interface = 2; + + // The following are ExecutionMetadata fields + // We can't have the ExecutionMetadata object directly because of import cycle + repeated core.ArtifactID artifact_ids = 3; + core.WorkflowExecutionIdentifier reference_execution = 4; + string principal = 5; + + // The ID of the LP that generated the execution that generated the Artifact. + // Here for provenance information. + // Launch plan IDs are easier to get than workflow IDs so we'll use these for now. 
+ core.Identifier launch_plan_id = 6; +} + +message CloudEventNodeExecution { + event.NodeExecutionEvent raw_event = 1; + + // The relevant task execution if applicable + core.TaskExecutionIdentifier task_exec_id = 2; + + // The typed interface for the task that produced the event. + core.TypedInterface output_interface = 3; + + // The following are ExecutionMetadata fields + // We can't have the ExecutionMetadata object directly because of import cycle + repeated core.ArtifactID artifact_ids = 4; + string principal = 5; + + // The ID of the LP that generated the execution that generated the Artifact. + // Here for provenance information. + // Launch plan IDs are easier to get than workflow IDs so we'll use these for now. + core.Identifier launch_plan_id = 6; +} + +message CloudEventTaskExecution { + event.TaskExecutionEvent raw_event = 1; +} + +// This event is to be sent by Admin after it creates an execution. +message CloudEventExecutionStart { + // The execution created. + core.WorkflowExecutionIdentifier execution_id = 1; + // The launch plan used. + core.Identifier launch_plan_id = 2; + + core.Identifier workflow_id = 3; + + // Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run. + repeated core.ArtifactID artifact_ids = 4; + + // Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service. 
+ repeated string artifact_trackers = 5; + + string principal = 6; +} diff --git a/docs/api/flyteidl/flyteidl/event/event.proto b/docs/api/flyteidl/flyteidl/event/event.proto new file mode 100644 index 0000000000..640b4804e9 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/event/event.proto @@ -0,0 +1,328 @@ +syntax = "proto3"; + +package flyteidl.event; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event"; + +import "flyteidl/core/literals.proto"; +import "flyteidl/core/compiler.proto"; +import "flyteidl/core/execution.proto"; +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/catalog.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/struct.proto"; + + +message WorkflowExecutionEvent { + // Workflow execution id + core.WorkflowExecutionIdentifier execution_id = 1; + + // the id of the originator (Propeller) of the event + string producer_id = 2; + + core.WorkflowExecution.Phase phase = 3; + + // This timestamp represents when the original event occurred, it is generated + // by the executor of the workflow. + google.protobuf.Timestamp occurred_at = 4; + + oneof output_result { + // URL to the output of the execution, it encodes all the information + // including Cloud source provider. ie., s3://... + string output_uri = 5; + + // Error information for the execution + core.ExecutionError error = 6; + + // Raw output data produced by this workflow execution. + core.LiteralMap output_data = 7; + } +} + +message NodeExecutionEvent { + // Unique identifier for this node execution + core.NodeExecutionIdentifier id = 1; + + // the id of the originator (Propeller) of the event + string producer_id = 2; + + core.NodeExecution.Phase phase = 3; + + // This timestamp represents when the original event occurred, it is generated + // by the executor of the node. + google.protobuf.Timestamp occurred_at = 4; + + oneof input_value { + string input_uri = 5; + + // Raw input data consumed by this node execution. 
+ core.LiteralMap input_data = 20; + } + + oneof output_result { + // URL to the output of the execution, it encodes all the information + // including Cloud source provider. ie., s3://... + string output_uri = 6; + + // Error information for the execution + core.ExecutionError error = 7; + + // Raw output data produced by this node execution. + core.LiteralMap output_data = 15; + } + + // Additional metadata to do with this event's node target based + // on the node type + oneof target_metadata { + WorkflowNodeMetadata workflow_node_metadata = 8; + TaskNodeMetadata task_node_metadata = 14; + } + + // [To be deprecated] Specifies which task (if any) launched this node. + ParentTaskExecutionMetadata parent_task_metadata = 9; + + // Specifies the parent node of the current node execution. Node executions at level zero will not have a parent node. + ParentNodeExecutionMetadata parent_node_metadata = 10; + + // Retry group to indicate grouping of nodes by retries + string retry_group = 11; + + // Identifier of the node in the original workflow/graph + // This maps to value of WorkflowTemplate.nodes[X].id + string spec_node_id = 12; + + // Friendly readable name for the node + string node_name = 13; + + int32 event_version = 16; + + // Whether this node launched a subworkflow. + bool is_parent = 17; + + // Whether this node yielded a dynamic workflow. + bool is_dynamic = 18; + + // String location uniquely identifying where the deck HTML file is + // NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar) + string deck_uri = 19; + + // This timestamp represents the instant when the event was reported by the executing framework. For example, + // when first processing a node the `occurred_at` timestamp should be the instant propeller makes progress, so when + // literal inputs are initially copied. The event however will not be sent until after the copy completes. 
+  // Extracting both of these timestamps facilitates a more accurate portrayal of the evaluation time-series.
+  google.protobuf.Timestamp reported_at = 21;
+
+  // Indicates if this node is an ArrayNode.
+  bool is_array = 22;
+
+  // So that Admin doesn't have to rebuild the node execution graph to find the target entity, propeller will fill this
+  // in optionally - currently this is only filled in for subworkflows. This is the ID of the subworkflow corresponding
+  // to this node execution. It is difficult to find because Admin only sees one node at a time. A subworkflow could be
+  // nested multiple layers deep, and you'd need to access the correct workflow template to know the target subworkflow.
+  core.Identifier target_entity = 23;
+
+  // Tasks and subworkflows (but not launch plans) that are run within a dynamic task are effectively independent of
+  // the tasks that are registered in Admin's db. Confusingly, they are often identical, but sometimes they are not
+  // even registered at all. Similar to the target_entity field, at the time Admin receives this event, it has no idea
+  // if the relevant execution entity was registered, or dynamic. This field indicates that the target_entity ID,
+  // as well as task IDs in any corresponding Task Executions, should not be used to look up the task in Admin's db.
+  bool is_in_dynamic_chain = 24;
+}
+
+// For Workflow Nodes we need to send information about the workflow that's launched
+message WorkflowNodeMetadata {
+  core.WorkflowExecutionIdentifier execution_id = 1;
+}
+
+message TaskNodeMetadata {
+  // Captures the status of caching for this execution.
+  core.CatalogCacheStatus cache_status = 1;
+  // This structure carries the catalog artifact information
+  core.CatalogMetadata catalog_key = 2;
+  // Captures the status of cache reservations for this execution.
+  core.CatalogReservation.Status reservation_status = 3;
+  // The latest checkpoint location
+  string checkpoint_uri = 4;
+
+  // In the case this task launched a dynamic workflow we capture its structure here.
+  DynamicWorkflowNodeMetadata dynamic_workflow = 16;
+}
+
+// For dynamic workflow nodes we send information about the dynamic workflow definition that gets generated.
+message DynamicWorkflowNodeMetadata {
+  // id represents the unique identifier of the workflow.
+  core.Identifier id = 1;
+
+  // Represents the compiled representation of the embedded dynamic workflow.
+  core.CompiledWorkflowClosure compiled_workflow = 2;
+
+  // dynamic_job_spec_uri is the location of the DynamicJobSpec proto message for this DynamicWorkflow. This is
+  // required to correctly recover partially completed executions where the workflow has already been compiled.
+  string dynamic_job_spec_uri = 3;
+}
+
+message ParentTaskExecutionMetadata {
+  core.TaskExecutionIdentifier id = 1;
+}
+
+message ParentNodeExecutionMetadata {
+  // Unique identifier of the parent node id within the execution
+  // This is the value of core.NodeExecutionIdentifier.node_id of the parent node
+  string node_id = 1;
+}
+
+message EventReason {
+  // An explanation for this event
+  string reason = 1;
+
+  // The time this reason occurred
+  google.protobuf.Timestamp occurred_at = 2;
+}
+
+// Plugin specific execution event information. For tasks like Python, Hive, Spark, DynamicJob.
+message TaskExecutionEvent {
+  // ID of the task. In combination with the retryAttempt this will indicate
+  // the task execution uniquely for a given parent node execution.
+  core.Identifier task_id = 1;
+
+  // A task execution is always kicked off by a node execution, the event consumer
+  // will use the parent_id to relate the task to its parent node execution
+  core.NodeExecutionIdentifier parent_node_execution_id = 2;
+
+  // retry attempt number for this task, ie., 2 for the second attempt
+  uint32 retry_attempt = 3;
+
+  // Phase associated with the event
+  core.TaskExecution.Phase phase = 4;
+
+  // id of the process that sent this event, mainly for trace debugging
+  string producer_id = 5;
+
+  // log information for the task execution
+  repeated core.TaskLog logs = 6;
+
+  // This timestamp represents when the original event occurred, it is generated
+  // by the executor of the task.
+  google.protobuf.Timestamp occurred_at = 7;
+
+  oneof input_value {
+    // URI of the input file, it encodes all the information
+    // including Cloud source provider. ie., s3://...
+    string input_uri = 8;
+
+    // Raw input data consumed by this task execution.
+    core.LiteralMap input_data = 19;
+  }
+
+  oneof output_result {
+    // URI to the output of the execution, it will be in a format that encodes all the information
+    // including Cloud source provider. ie., s3://...
+    string output_uri = 9;
+
+    // Error information for the execution
+    core.ExecutionError error = 10;
+
+    // Raw output data produced by this task execution.
+    core.LiteralMap output_data = 17;
+  }
+
+  // Custom data that the task plugin sends back. This is extensible to allow various plugins in the system.
+  google.protobuf.Struct custom_info = 11;
+
+  // Some phases, like RUNNING, can send multiple events with changed metadata (new logs, additional custom_info, etc)
+  // that should be recorded regardless of the lack of phase change.
+  // The version field should be incremented when metadata changes across the duration of an individual phase.
+  uint32 phase_version = 12;
+
+  // An optional explanation for the phase transition.
+  // Deprecated: Use reasons instead.
+  string reason = 13 [deprecated = true];
+
+  // An optional list of explanations for the phase transition.
+  repeated EventReason reasons = 21;
+
+  // A predefined yet extensible Task type identifier. If the task definition is already registered in flyte admin
+  // this type will be identical, but not all task executions necessarily use pre-registered definitions and this
+  // type is useful to render the task in the UI, filter task executions, etc.
+  string task_type = 14;
+
+  // Metadata around how a task was executed.
+  TaskExecutionMetadata metadata = 16;
+
+  // The event version is used to indicate versioned changes in how data is reported using this
+  // proto message. For example, event_version > 0 means that map tasks report logs using the
+  // TaskExecutionMetadata ExternalResourceInfo fields for each subtask rather than the TaskLog
+  // in this message.
+  int32 event_version = 18;
+
+  // This timestamp represents the instant when the event was reported by the executing framework. For example, a k8s
+  // pod task may be marked completed at (ie. `occurred_at`) the instant the container running user code completes,
+  // but this event will not be reported until the pod is marked as completed. Extracting both of these timestamps
+  // facilitates a more accurate portrayal of the evaluation time-series.
+  google.protobuf.Timestamp reported_at = 20;
+}
+
+// This message contains metadata about external resources produced or used by a specific task execution.
+message ExternalResourceInfo {
+
+  // Identifier for an external resource created by this task execution, for example Qubole query ID or presto query ids.
+  string external_id = 1;
+
+  // A unique index for the external resource with respect to all external resources for this task. Although the
+  // identifier may change between task reporting events or retries, this will remain the same to enable aggregating
+  // information from multiple reports.
+  uint32 index = 2;
+
+  // Retry attempt number for this external resource, ie., 2 for the second attempt
+  uint32 retry_attempt = 3;
+
+  // Phase associated with the external resource
+  core.TaskExecution.Phase phase = 4;
+
+  // Captures the status of caching for this external resource execution.
+  core.CatalogCacheStatus cache_status = 5;
+
+  // log information for the external resource execution
+  repeated core.TaskLog logs = 6;
+}
+
+
+// This message holds task execution metadata specific to resource allocation used to manage concurrent
+// executions for a project namespace.
+message ResourcePoolInfo {
+  // Unique resource ID used to identify this execution when allocating a token.
+  string allocation_token = 1;
+
+  // Namespace under which this task execution requested an allocation token.
+  string namespace = 2;
+}
+
+// Holds metadata around how a task was executed.
+// As a task transitions across event phases during execution some attributes, such as its generated name, generated external resources,
+// and more may grow in size but not change necessarily based on the phase transition that sparked the event update.
+// Metadata is a container for these attributes across the task execution lifecycle.
+message TaskExecutionMetadata {
+
+  // Unique, generated name for this task execution used by the backend.
+  string generated_name = 1;
+
+  // Additional data on external resources on other back-ends or platforms (e.g. Hive, Qubole, etc) launched by this task execution.
+  repeated ExternalResourceInfo external_resources = 2;
+
+  // Includes additional data on concurrent resource management used during execution.
+  // This is a repeated field because a plugin can request multiple resource allocations during execution.
+  repeated ResourcePoolInfo resource_pool_info = 3;
+
+  // The identifier of the plugin used to execute this task.
+  string plugin_identifier = 4;
+
+  // Includes the broad category of machine used for this specific task execution.
+ enum InstanceClass { + // The default instance class configured for the flyte application platform. + DEFAULT = 0; + + // The instance class configured for interruptible tasks. + INTERRUPTIBLE = 1; + } + InstanceClass instance_class = 16; +} diff --git a/docs/api/flyteidl/flyteidl/plugins/array_job.proto b/docs/api/flyteidl/flyteidl/plugins/array_job.proto new file mode 100644 index 0000000000..e202316ef5 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/array_job.proto @@ -0,0 +1,30 @@ +syntax = "proto3"; + +package flyteidl.plugins; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; + +// Describes a job that can process independent pieces of data concurrently. Multiple copies of the runnable component +// will be executed concurrently. +message ArrayJob { + // Defines the maximum number of instances to bring up concurrently at any given point. Note that this is an + // optimistic restriction and that, due to network partitioning or other failures, the actual number of currently + // running instances might be more. This has to be a positive number if assigned. Default value is size. + int64 parallelism = 1; + + // Defines the number of instances to launch at most. This number should match the size of the input if the job + // requires processing of all input data. This has to be a positive number. + // In the case this is not defined, the back-end will determine the size at run-time by reading the inputs. + int64 size = 2; + + oneof success_criteria { + // An absolute number of the minimum number of successful completions of subtasks. As soon as this criteria is met, + // the array job will be marked as successful and outputs will be computed. This has to be a non-negative number if + // assigned. Default value is size (if specified). + int64 min_successes = 3; + + // If the array job size is not known beforehand, the min_success_ratio can instead be used to determine when an array + // job can be marked successful. 
+ float min_success_ratio = 4; + } +} diff --git a/docs/api/flyteidl/flyteidl/plugins/common.proto b/docs/api/flyteidl/flyteidl/plugins/common.proto new file mode 100644 index 0000000000..15f31cf2d2 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/common.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; + +package flyteidl.plugins; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; + +import "flyteidl/core/tasks.proto"; + +enum RestartPolicy { + RESTART_POLICY_NEVER = 0; + RESTART_POLICY_ON_FAILURE = 1; + RESTART_POLICY_ALWAYS = 2; +} + +message CommonReplicaSpec { + // Number of replicas + int32 replicas = 1; + + // Image used for the replica group + string image = 2; + + // Resources required for the replica group + core.Resources resources = 3; + + // RestartPolicy determines whether pods will be restarted when they exit + RestartPolicy restart_policy = 4; +} diff --git a/docs/api/flyteidl/flyteidl/plugins/dask.proto b/docs/api/flyteidl/flyteidl/plugins/dask.proto new file mode 100644 index 0000000000..96e861049a --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/dask.proto @@ -0,0 +1,41 @@ +syntax = "proto3"; + +import "flyteidl/core/tasks.proto"; + +package flyteidl.plugins; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; + + +// Custom Proto for Dask Plugin. +message DaskJob { + // Spec for the scheduler pod. + DaskScheduler scheduler = 1; + + // Spec of the default worker group. + DaskWorkerGroup workers = 2; +} + +// Specification for the scheduler pod. +message DaskScheduler { + // Optional image to use. If unset, will use the default image. + string image = 1; + + // Resources assigned to the scheduler pod. + core.Resources resources = 2; +} + +message DaskWorkerGroup { + // Number of workers in the group. + uint32 number_of_workers = 1; + + // Optional image to use for the pods of the worker group. If unset, will use the default image. 
+ string image = 2; + + // Resources assigned to the all pods of the worker group. + // As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices + // it is advised to only set limits. If requests are not explicitly set, the plugin will make + // sure to set requests==limits. + // The plugin sets ` --memory-limit` as well as `--nthreads` for the workers according to the limit. + core.Resources resources = 3; +} diff --git a/docs/api/flyteidl/flyteidl/plugins/kubeflow/common.proto b/docs/api/flyteidl/flyteidl/plugins/kubeflow/common.proto new file mode 100644 index 0000000000..37655caf3d --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/kubeflow/common.proto @@ -0,0 +1,28 @@ +syntax = "proto3"; + +package flyteidl.plugins.kubeflow; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins/kubeflow"; + +import public "flyteidl/plugins/common.proto"; + +enum CleanPodPolicy { + CLEANPOD_POLICY_NONE = 0; + CLEANPOD_POLICY_RUNNING = 1; + CLEANPOD_POLICY_ALL = 2; +} + +message RunPolicy { + // Defines the policy to kill pods after the job completes. Default to None. + CleanPodPolicy clean_pod_policy = 1; + + // TTL to clean up jobs. Default to infinite. + int32 ttl_seconds_after_finished = 2; + + // Specifies the duration in seconds relative to the startTime that the job may be active + // before the system tries to terminate it; value must be positive integer. + int32 active_deadline_seconds = 3; + + // Number of retries before marking this job failed. 
+ int32 backoff_limit = 4; +} diff --git a/docs/api/flyteidl/flyteidl/plugins/kubeflow/mpi.proto b/docs/api/flyteidl/flyteidl/plugins/kubeflow/mpi.proto new file mode 100644 index 0000000000..b98e8aad99 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/kubeflow/mpi.proto @@ -0,0 +1,47 @@ +syntax = "proto3"; + +package flyteidl.plugins.kubeflow; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins/kubeflow"; + +import "flyteidl/core/tasks.proto"; +import "flyteidl/plugins/kubeflow/common.proto"; + +// Proto for plugin that enables distributed training using https://github.com/kubeflow/mpi-operator +message DistributedMPITrainingTask { + // Worker replicas spec + DistributedMPITrainingReplicaSpec worker_replicas = 1; + + // Master replicas spec + DistributedMPITrainingReplicaSpec launcher_replicas = 2; + + // RunPolicy encapsulates various runtime policies of the distributed training + // job, for example how to clean up resources and how long the job can stay + // active. + RunPolicy run_policy = 3; + + // Number of slots per worker + int32 slots = 4; +} + +// Replica specification for distributed MPI training +message DistributedMPITrainingReplicaSpec { + // 1~4 deprecated. Use common instead. 
+ // Number of replicas + int32 replicas = 1 [deprecated = true]; + + // Image used for the replica group + string image = 2 [deprecated = true]; + + // Resources required for the replica group + core.Resources resources = 3 [deprecated = true]; + + // Restart policy determines whether pods will be restarted when they exit + RestartPolicy restart_policy = 4 [deprecated = true]; + + // MPI sometimes requires different command set for different replica groups + repeated string command = 5; + + // The common replica spec + CommonReplicaSpec common = 6; +} diff --git a/docs/api/flyteidl/flyteidl/plugins/kubeflow/pytorch.proto b/docs/api/flyteidl/flyteidl/plugins/kubeflow/pytorch.proto new file mode 100644 index 0000000000..0433384e75 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/kubeflow/pytorch.proto @@ -0,0 +1,53 @@ +syntax = "proto3"; + +package flyteidl.plugins.kubeflow; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins/kubeflow"; + +import "flyteidl/core/tasks.proto"; +import "flyteidl/plugins/kubeflow/common.proto"; + +// Custom proto for torch elastic config for distributed training using +// https://github.com/kubeflow/training-operator/blob/master/pkg/apis/kubeflow.org/v1/pytorch_types.go +message ElasticConfig { + string rdzv_backend = 1; + int32 min_replicas = 2; + int32 max_replicas = 3; + int32 nproc_per_node = 4; + int32 max_restarts = 5; +} + +// Proto for plugin that enables distributed training using https://github.com/kubeflow/pytorch-operator +message DistributedPyTorchTrainingTask { + // Worker replicas spec + DistributedPyTorchTrainingReplicaSpec worker_replicas = 1; + + // Master replicas spec, master replicas can only have 1 replica + DistributedPyTorchTrainingReplicaSpec master_replicas = 2; + + // RunPolicy encapsulates various runtime policies of the distributed training + // job, for example how to clean up resources and how long the job can stay + // active. 
+ RunPolicy run_policy = 3; + + // config for an elastic pytorch job + ElasticConfig elastic_config = 4; +} + +message DistributedPyTorchTrainingReplicaSpec { + // 1~4 deprecated. Use common instead. + // Number of replicas + int32 replicas = 1 [deprecated = true]; + + // Image used for the replica group + string image = 2 [deprecated = true]; + + // Resources required for the replica group + core.Resources resources = 3 [deprecated = true]; + + // Restart policy determines whether pods will be restarted when they exit + RestartPolicy restart_policy = 4 [deprecated = true]; + + // The common replica spec + CommonReplicaSpec common = 5; +} diff --git a/docs/api/flyteidl/flyteidl/plugins/kubeflow/tensorflow.proto b/docs/api/flyteidl/flyteidl/plugins/kubeflow/tensorflow.proto new file mode 100644 index 0000000000..251526f7e0 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/kubeflow/tensorflow.proto @@ -0,0 +1,46 @@ +syntax = "proto3"; + +package flyteidl.plugins.kubeflow; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins/kubeflow"; + +import "flyteidl/core/tasks.proto"; +import "flyteidl/plugins/kubeflow/common.proto"; + +// Proto for plugin that enables distributed training using https://github.com/kubeflow/tf-operator +message DistributedTensorflowTrainingTask { + // Worker replicas spec + DistributedTensorflowTrainingReplicaSpec worker_replicas = 1; + + // Parameter server replicas spec + DistributedTensorflowTrainingReplicaSpec ps_replicas = 2; + + // Chief replicas spec + DistributedTensorflowTrainingReplicaSpec chief_replicas = 3; + + // RunPolicy encapsulates various runtime policies of the distributed training + // job, for example how to clean up resources and how long the job can stay + // active. + RunPolicy run_policy = 4; + + // Evaluator replicas spec + DistributedTensorflowTrainingReplicaSpec evaluator_replicas = 5; +} + +message DistributedTensorflowTrainingReplicaSpec { + // 1~4 deprecated. Use common instead. 
+ // Number of replicas + int32 replicas = 1 [deprecated = true]; + + // Image used for the replica group + string image = 2 [deprecated = true]; + + // Resources required for the replica group + core.Resources resources = 3 [deprecated = true]; + + // Restart policy determines whether pods will be restarted when they exit + RestartPolicy restart_policy = 4 [deprecated = true]; + + // The common replica spec + CommonReplicaSpec common = 5; +} diff --git a/docs/api/flyteidl/flyteidl/plugins/mpi.proto b/docs/api/flyteidl/flyteidl/plugins/mpi.proto new file mode 100644 index 0000000000..9e657279d8 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/mpi.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package flyteidl.plugins; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; + +// MPI operator proposal https://github.com/kubeflow/community/blob/master/proposals/mpi-operator-proposal.md +// Custom proto for plugin that enables distributed training using https://github.com/kubeflow/mpi-operator +message DistributedMPITrainingTask { + // number of worker spawned in the cluster for this job + int32 num_workers = 1; + + // number of launcher replicas spawned in the cluster for this job + // The launcher pod invokes mpirun and communicates with worker pods through MPI. + int32 num_launcher_replicas = 2; + + // number of slots per worker used in hostfile. + // The available slots (GPUs) in each pod. 
+ int32 slots = 3; +} \ No newline at end of file diff --git a/docs/api/flyteidl/flyteidl/plugins/presto.proto b/docs/api/flyteidl/flyteidl/plugins/presto.proto new file mode 100644 index 0000000000..5ff3a8a2e0 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/presto.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package flyteidl.plugins; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; + +// This message works with the 'presto' task type in the SDK and is the object that will be in the 'custom' field +// of a Presto task's TaskTemplate +message PrestoQuery { + string routing_group = 1; + string catalog = 2; + string schema = 3; + string statement = 4; +} diff --git a/docs/api/flyteidl/flyteidl/plugins/pytorch.proto b/docs/api/flyteidl/flyteidl/plugins/pytorch.proto new file mode 100644 index 0000000000..02e748ab8e --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/pytorch.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; + +package flyteidl.plugins; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; + +// Custom proto for torch elastic config for distributed training using +// https://github.com/kubeflow/training-operator/blob/master/pkg/apis/kubeflow.org/v1/pytorch_types.go +message ElasticConfig { + string rdzv_backend = 1; + int32 min_replicas = 2; + int32 max_replicas = 3; + int32 nproc_per_node = 4; + int32 max_restarts = 5; +} + +// Custom proto for plugin that enables distributed training using https://github.com/kubeflow/pytorch-operator +message DistributedPyTorchTrainingTask { + // number of worker replicas spawned in the cluster for this job + int32 workers = 1; + + // config for an elastic pytorch job + // + ElasticConfig elastic_config = 2; +} diff --git a/docs/api/flyteidl/flyteidl/plugins/qubole.proto b/docs/api/flyteidl/flyteidl/plugins/qubole.proto new file mode 100644 index 0000000000..b1faada9f3 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/qubole.proto @@ -0,0 
+1,26 @@ +syntax = "proto3"; + +package flyteidl.plugins; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; + +// Defines a query to execute on a hive cluster. +message HiveQuery { + string query = 1; + uint32 timeout_sec = 2; + uint32 retryCount = 3; +} + +// Defines a collection of hive queries. +message HiveQueryCollection { + repeated HiveQuery queries = 2; +} + +// This message works with the 'hive' task type in the SDK and is the object that will be in the 'custom' field +// of a hive task's TaskTemplate +message QuboleHiveJob { + string cluster_label = 1; + HiveQueryCollection query_collection = 2 [deprecated=true]; + repeated string tags = 3; + HiveQuery query = 4; +} diff --git a/docs/api/flyteidl/flyteidl/plugins/ray.proto b/docs/api/flyteidl/flyteidl/plugins/ray.proto new file mode 100644 index 0000000000..c20c6360e7 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/ray.proto @@ -0,0 +1,53 @@ +syntax = "proto3"; + +package flyteidl.plugins; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; + +// RayJobSpec defines the desired state of RayJob +message RayJob { + // RayClusterSpec is the cluster template to run the job + RayCluster ray_cluster = 1; + // runtime_env is base64 encoded. + // Ray runtime environments: https://docs.ray.io/en/latest/ray-core/handling-dependencies.html#runtime-environments + string runtime_env = 2 [deprecated = true]; + // shutdown_after_job_finishes specifies whether the RayCluster should be deleted after the RayJob finishes. + bool shutdown_after_job_finishes = 3; + // ttl_seconds_after_finished specifies the number of seconds after which the RayCluster will be deleted after the RayJob finishes. + int32 ttl_seconds_after_finished = 4; + // RuntimeEnvYAML represents the runtime environment configuration + // provided as a multi-line YAML string. 
+ string runtime_env_yaml = 5; +} + +// Define Ray cluster defines the desired state of RayCluster +message RayCluster { + // HeadGroupSpecs are the spec for the head pod + HeadGroupSpec head_group_spec = 1; + // WorkerGroupSpecs are the specs for the worker pods + repeated WorkerGroupSpec worker_group_spec = 2; + // Whether to enable autoscaling. + bool enable_autoscaling = 3; +} + +// HeadGroupSpec are the spec for the head pod +message HeadGroupSpec { + // Optional. RayStartParams are the params of the start command: address, object-store-memory. + // Refer to https://docs.ray.io/en/latest/ray-core/package-ref.html#ray-start + map ray_start_params = 1; +} + +// WorkerGroupSpec are the specs for the worker pods +message WorkerGroupSpec { + // Required. RayCluster can have multiple worker groups, and it distinguishes them by name + string group_name = 1; + // Required. Desired replicas of the worker group. Defaults to 1. + int32 replicas = 2; + // Optional. Min replicas of the worker group. MinReplicas defaults to 1. + int32 min_replicas = 3; + // Optional. Max replicas of the worker group. MaxReplicas defaults to maxInt32 + int32 max_replicas = 4; + // Optional. RayStartParams are the params of the start command: address, object-store-memory. + // Refer to https://docs.ray.io/en/latest/ray-core/package-ref.html#ray-start + map ray_start_params = 5; +} diff --git a/docs/api/flyteidl/flyteidl/plugins/spark.proto b/docs/api/flyteidl/flyteidl/plugins/spark.proto new file mode 100644 index 0000000000..666ea311b2 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/spark.proto @@ -0,0 +1,34 @@ +syntax = "proto3"; + +package flyteidl.plugins; +import "google/protobuf/struct.proto"; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; + +message SparkApplication { + enum Type { + PYTHON = 0; + JAVA = 1; + SCALA = 2; + R = 3; + } +} + +// Custom Proto for Spark Plugin. 
+message SparkJob { + SparkApplication.Type applicationType = 1; + string mainApplicationFile = 2; + string mainClass = 3; + map sparkConf = 4; + map hadoopConf = 5; + string executorPath = 6; // Executor path for Python jobs. + // Databricks job configuration. + // Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure. + google.protobuf.Struct databricksConf = 7; + // Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html + // This token can be set in either flytepropeller or flytekit. + string databricksToken = 8; + // Domain name of your deployment. Use the form .cloud.databricks.com. + // This instance name can be set in either flytepropeller or flytekit. + string databricksInstance = 9; +} diff --git a/docs/api/flyteidl/flyteidl/plugins/tensorflow.proto b/docs/api/flyteidl/flyteidl/plugins/tensorflow.proto new file mode 100644 index 0000000000..e494a6cc32 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/tensorflow.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; + +package flyteidl.plugins; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; + +// Custom proto for plugin that enables distributed training using https://github.com/kubeflow/tf-operator +message DistributedTensorflowTrainingTask { + // number of worker replicas spawned in the cluster for this job + int32 workers = 1; + // PS -> Parameter server + // number of ps replicas spawned in the cluster for this job + int32 ps_replicas = 2; + // number of chief replicas spawned in the cluster for this job + int32 chief_replicas = 3; + // number of evaluator replicas spawned in the cluster for this job + int32 evaluator_replicas = 4; +} diff --git a/docs/api/flyteidl/flyteidl/plugins/waitable.proto b/docs/api/flyteidl/flyteidl/plugins/waitable.proto new file mode 100644 index 0000000000..dd2138d535 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/plugins/waitable.proto @@ -0,0 
+1,15 @@ +syntax = "proto3"; + +import "flyteidl/core/execution.proto"; +import "flyteidl/core/identifier.proto"; + +package flyteidl.plugins; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; + +// Represents an Execution that was launched and could be waited on. +message Waitable { + core.WorkflowExecutionIdentifier wf_exec_id = 1; + core.WorkflowExecution.Phase phase = 2; + string workflow_id = 3; +} diff --git a/docs/api/flyteidl/flyteidl/service/admin.proto b/docs/api/flyteidl/flyteidl/service/admin.proto new file mode 100644 index 0000000000..d7d5adeeef --- /dev/null +++ b/docs/api/flyteidl/flyteidl/service/admin.proto @@ -0,0 +1,668 @@ +syntax = "proto3"; +package flyteidl.service; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; + +import "google/api/annotations.proto"; +import "flyteidl/admin/project.proto"; +import "flyteidl/admin/project_domain_attributes.proto"; +import "flyteidl/admin/project_attributes.proto"; +import "flyteidl/admin/task.proto"; +import "flyteidl/admin/workflow.proto"; +import "flyteidl/admin/workflow_attributes.proto"; +import "flyteidl/admin/launch_plan.proto"; +import "flyteidl/admin/event.proto"; +import "flyteidl/admin/execution.proto"; +import "flyteidl/admin/matchable_resource.proto"; +import "flyteidl/admin/node_execution.proto"; +import "flyteidl/admin/task_execution.proto"; +import "flyteidl/admin/version.proto"; +import "flyteidl/admin/common.proto"; +import "flyteidl/admin/description_entity.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; + + +// The following defines an RPC service that is also served over HTTP via grpc-gateway. 
+// Standard response codes for both are defined here: https://github.com/grpc-ecosystem/grpc-gateway/blob/master/runtime/errors.go +service AdminService { + // Create and upload a :ref:`ref_flyteidl.admin.Task` definition + rpc CreateTask (flyteidl.admin.TaskCreateRequest) returns (flyteidl.admin.TaskCreateResponse) { + option (google.api.http) = { + post: "/api/v1/tasks" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Create and register a task definition." + responses: { + key: "400" + value: { + description: "Returned for bad request that may have failed validation." + } + } + responses: { + key: "409" + value: { + description: "Returned for a request that references an identical entity that has already been registered." + } + } + }; + } + + // Fetch a :ref:`ref_flyteidl.admin.Task` definition. + rpc GetTask (flyteidl.admin.ObjectGetRequest) returns (flyteidl.admin.Task) { + option (google.api.http) = { + get: "/api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve an existing task definition." + }; + } + + // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. + rpc ListTaskIds (flyteidl.admin.NamedEntityIdentifierListRequest) returns (flyteidl.admin.NamedEntityIdentifierList) { + option (google.api.http) = { + get: "/api/v1/task_ids/{project}/{domain}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Fetch existing task definition identifiers matching input filters." + }; + } + + // Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. 
+ rpc ListTasks (flyteidl.admin.ResourceListRequest) returns (flyteidl.admin.TaskList) { + option (google.api.http) = { + get: "/api/v1/tasks/{id.project}/{id.domain}/{id.name}" + additional_bindings { + get: "/api/v1/tasks/{id.project}/{id.domain}", + } + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Fetch existing task definitions matching input filters." + }; + } + + // Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition + rpc CreateWorkflow (flyteidl.admin.WorkflowCreateRequest) returns (flyteidl.admin.WorkflowCreateResponse) { + option (google.api.http) = { + post: "/api/v1/workflows" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Create and register a workflow definition." + responses: { + key: "400" + value: { + description: "Returned for bad request that may have failed validation." + } + } + responses: { + key: "409" + value: { + description: "Returned for a request that references an identical entity that has already been registered." + } + } + }; + } + + // Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. + rpc GetWorkflow (flyteidl.admin.ObjectGetRequest) returns (flyteidl.admin.Workflow) { + option (google.api.http) = { + get: "/api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve an existing workflow definition." + }; + } + + // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. + rpc ListWorkflowIds (flyteidl.admin.NamedEntityIdentifierListRequest) returns (flyteidl.admin.NamedEntityIdentifierList) { + option (google.api.http) = { + get: "/api/v1/workflow_ids/{project}/{domain}" + }; + // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + // description: "Fetch an existing workflow definition identifiers matching input filters." 
+ // }; + } + + // Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. + rpc ListWorkflows (flyteidl.admin.ResourceListRequest) returns (flyteidl.admin.WorkflowList) { + option (google.api.http) = { + get: "/api/v1/workflows/{id.project}/{id.domain}/{id.name}" + additional_bindings { + get: "/api/v1/workflows/{id.project}/{id.domain}", + } + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Fetch existing workflow definitions matching input filters." + }; + } + + // Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition + rpc CreateLaunchPlan (flyteidl.admin.LaunchPlanCreateRequest) returns (flyteidl.admin.LaunchPlanCreateResponse) { + option (google.api.http) = { + post: "/api/v1/launch_plans" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Create and register a launch plan definition." + responses: { + key: "400" + value: { + description: "Returned for bad request that may have failed validation." + } + } + responses: { + key: "409" + value: { + description: "Returned for a request that references an identical entity that has already been registered." + } + } + }; + } + + // Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. + rpc GetLaunchPlan (flyteidl.admin.ObjectGetRequest) returns (flyteidl.admin.LaunchPlan) { + option (google.api.http) = { + get: "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve an existing launch plan definition." + }; + } + + // Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. 
+ rpc GetActiveLaunchPlan (flyteidl.admin.ActiveLaunchPlanRequest) returns (flyteidl.admin.LaunchPlan) { + option (google.api.http) = { + get: "/api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve the active launch plan version specified by input request filters." + }; + } + + // List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. + rpc ListActiveLaunchPlans (flyteidl.admin.ActiveLaunchPlanListRequest) returns (flyteidl.admin.LaunchPlanList) { + option (google.api.http) = { + get: "/api/v1/active_launch_plans/{project}/{domain}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Fetch the active launch plan versions specified by input request filters." + }; + } + + // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. + rpc ListLaunchPlanIds (flyteidl.admin.NamedEntityIdentifierListRequest) returns (flyteidl.admin.NamedEntityIdentifierList) { + option (google.api.http) = { + get: "/api/v1/launch_plan_ids/{project}/{domain}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Fetch existing launch plan definition identifiers matching input filters." + }; + } + + // Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. + rpc ListLaunchPlans (flyteidl.admin.ResourceListRequest) returns (flyteidl.admin.LaunchPlanList) { + option (google.api.http) = { + get: "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}" + additional_bindings { + get: "/api/v1/launch_plans/{id.project}/{id.domain}" + } + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Fetch existing launch plan definitions matching input filters." + }; + } + + // Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. 
+ rpc UpdateLaunchPlan (flyteidl.admin.LaunchPlanUpdateRequest) returns (flyteidl.admin.LaunchPlanUpdateResponse) { + option (google.api.http) = { + put: "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Update the status of an existing launch plan definition. " + "At most one launch plan version for a given {project, domain, name} can be active at a time. " + "If this call sets a launch plan to active and existing version is already active, the result of this call will be that the " + "formerly active launch plan will be made inactive and specified launch plan in this request will be made active. " + "In the event that the formerly active launch plan had a schedule associated it with it, this schedule will be disabled. " + "If the reference launch plan in this request is being set to active and has a schedule associated with it, the schedule will be enabled." + }; + } + + // Triggers the creation of a :ref:`ref_flyteidl.admin.Execution` + rpc CreateExecution (flyteidl.admin.ExecutionCreateRequest) returns (flyteidl.admin.ExecutionCreateResponse) { + option (google.api.http) = { + post: "/api/v1/executions" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Create a workflow execution." + }; + } + + // Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` + rpc RelaunchExecution (flyteidl.admin.ExecutionRelaunchRequest) returns (flyteidl.admin.ExecutionCreateResponse) { + option (google.api.http) = { + post: "/api/v1/executions/relaunch" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Relaunch a workflow execution." + }; + } + + // Recreates a previously-run workflow execution that will only start executing from the last known failure point. 
+ // In Recover mode, users cannot change any input parameters or update the version of the execution. + // This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, + // downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. + // See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. + rpc RecoverExecution (flyteidl.admin.ExecutionRecoverRequest) returns (flyteidl.admin.ExecutionCreateResponse) { + option (google.api.http) = { + post: "/api/v1/executions/recover" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Recreates a previously-run workflow execution that will only start executing from the last known failure point. " + "In Recover mode, users cannot change any input parameters or update the version of the execution. " + "This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, " + "downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again." + }; + } + + // Fetches a :ref:`ref_flyteidl.admin.Execution`. + rpc GetExecution (flyteidl.admin.WorkflowExecutionGetRequest) returns (flyteidl.admin.Execution) { + option (google.api.http) = { + get: "/api/v1/executions/{id.project}/{id.domain}/{id.name}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve an existing workflow execution." + }; + } + + // Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. 
+ rpc UpdateExecution (flyteidl.admin.ExecutionUpdateRequest) returns (flyteidl.admin.ExecutionUpdateResponse) { + option (google.api.http) = { + put: "/api/v1/executions/{id.project}/{id.domain}/{id.name}" + body: "*" + }; + // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + // description: "Update execution belonging to project domain." + // }; + } + + // Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. + rpc GetExecutionData (flyteidl.admin.WorkflowExecutionGetDataRequest) returns (flyteidl.admin.WorkflowExecutionGetDataResponse) { + option (google.api.http) = { + get: "/api/v1/data/executions/{id.project}/{id.domain}/{id.name}" + }; + // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + // description: "Retrieve input and output data from an existing workflow execution." + // }; + }; + + // Fetch a list of :ref:`ref_flyteidl.admin.Execution`. + rpc ListExecutions (flyteidl.admin.ResourceListRequest) returns (flyteidl.admin.ExecutionList) { + option (google.api.http) = { + get: "/api/v1/executions/{id.project}/{id.domain}" + }; + // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + // description: "Fetch existing workflow executions matching input filters." + // }; + } + + // Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. + rpc TerminateExecution (flyteidl.admin.ExecutionTerminateRequest) returns (flyteidl.admin.ExecutionTerminateResponse) { + option (google.api.http) = { + delete: "/api/v1/executions/{id.project}/{id.domain}/{id.name}" + body: "*" + }; + // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + // description: "Terminate the active workflow execution specified in the request." + // }; + } + + // Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. 
+ rpc GetNodeExecution (flyteidl.admin.NodeExecutionGetRequest) returns (flyteidl.admin.NodeExecution) { + option (google.api.http) = { + get: "/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" + }; + // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + // description: "Retrieve an existing node execution." + // }; + } + + // Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. + rpc GetDynamicNodeWorkflow (flyteidl.admin.GetDynamicNodeWorkflowRequest) returns (flyteidl.admin.DynamicNodeWorkflowResponse) { + option (google.api.http) = { + get: "/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow" + }; + // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + // description: "Retrieve a workflow closure from a dynamic node execution." + // }; + } + + // Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. + rpc ListNodeExecutions (flyteidl.admin.NodeExecutionListRequest) returns (flyteidl.admin.NodeExecutionList) { + option (google.api.http) = { + get: "/api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}" + }; + // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + // description: "Fetch existing node executions matching input filters." + // }; + } + + // Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. 
+ rpc ListNodeExecutionsForTask (flyteidl.admin.NodeExecutionForTaskListRequest) returns (flyteidl.admin.NodeExecutionList) { + option (google.api.http) = { + get: "/api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}" + }; + // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + // description: "Fetch child node executions launched by the specified task execution." + // }; + } + + // Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. + rpc GetNodeExecutionData (flyteidl.admin.NodeExecutionGetDataRequest) returns (flyteidl.admin.NodeExecutionGetDataResponse) { + option (google.api.http) = { + get: "/api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" + }; + // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + // description: "Retrieve input and output data from an existing node execution." + // }; + }; + + // Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. + rpc RegisterProject (flyteidl.admin.ProjectRegisterRequest) returns (flyteidl.admin.ProjectRegisterResponse) { + option (google.api.http) = { + post: "/api/v1/projects" + body: "*" + }; + // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + // description: "Register a project." + // }; + } + + // Updates an existing :ref:`ref_flyteidl.admin.Project` + // flyteidl.admin.Project should be passed but the domains property should be empty; + // it will be ignored in the handler as domains cannot be updated via this API. 
+ rpc UpdateProject (flyteidl.admin.Project) returns (flyteidl.admin.ProjectUpdateResponse) { + option (google.api.http) = { + put: "/api/v1/projects/{id}" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Update a project." + }; + } + + // Fetches a :ref:`ref_flyteidl.admin.Project` + rpc GetProject (flyteidl.admin.ProjectGetRequest) returns (flyteidl.admin.Project) { + option (google.api.http) = { + get: "/api/v1/projects/{id}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Fetch a registered project." + }; + } + + // Fetches a list of :ref:`ref_flyteidl.admin.Project` + rpc ListProjects (flyteidl.admin.ProjectListRequest) returns (flyteidl.admin.Projects) { + option (google.api.http) = { + get: "/api/v1/projects" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Fetch registered projects." + }; + } + + rpc GetDomains (flyteidl.admin.GetDomainRequest) returns (flyteidl.admin.GetDomainsResponse) { + option (google.api.http) = { + get: "/api/v1/domains" + }; + // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + // description: "Fetch registered domains." + // }; + } + + // Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. + rpc CreateWorkflowEvent (flyteidl.admin.WorkflowExecutionEventRequest) returns (flyteidl.admin.WorkflowExecutionEventResponse) { + option (google.api.http) = { + post: "/api/v1/events/workflows" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Create a workflow execution event recording a phase transition." + }; + } + + // Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. 
+ rpc CreateNodeEvent (flyteidl.admin.NodeExecutionEventRequest) returns (flyteidl.admin.NodeExecutionEventResponse) { + option (google.api.http) = { + post: "/api/v1/events/nodes" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Create a node execution event recording a phase transition." + }; + } + + // Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. + rpc CreateTaskEvent (flyteidl.admin.TaskExecutionEventRequest) returns (flyteidl.admin.TaskExecutionEventResponse) { + option (google.api.http) = { + post: "/api/v1/events/tasks" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Create a task execution event recording a phase transition." + }; + } + + // Fetches a :ref:`ref_flyteidl.admin.TaskExecution`. + rpc GetTaskExecution (flyteidl.admin.TaskExecutionGetRequest) returns (flyteidl.admin.TaskExecution) { + option (google.api.http) = { + get: "/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve an existing task execution." + }; + } + + // Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. + rpc ListTaskExecutions (flyteidl.admin.TaskExecutionListRequest) returns (flyteidl.admin.TaskExecutionList) { + option (google.api.http) = { + get: "/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Fetch existing task executions matching input filters." 
+ }; + + } + + // Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. + rpc GetTaskExecutionData (flyteidl.admin.TaskExecutionGetDataRequest) returns (flyteidl.admin.TaskExecutionGetDataResponse) { + option (google.api.http) = { + get: "/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve input and output data from an existing task execution." + }; + } + + // Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. + rpc UpdateProjectDomainAttributes (flyteidl.admin.ProjectDomainAttributesUpdateRequest) returns (flyteidl.admin.ProjectDomainAttributesUpdateResponse) { + option (google.api.http) = { + put: "/api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Update the customized resource attributes associated with a project-domain combination" + }; + } + + // Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. + rpc GetProjectDomainAttributes (flyteidl.admin.ProjectDomainAttributesGetRequest) returns (flyteidl.admin.ProjectDomainAttributesGetResponse) { + option (google.api.http) = { + get: "/api/v1/project_domain_attributes/{project}/{domain}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve the customized resource attributes associated with a project-domain combination" + }; + } + + // Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. 
+ rpc DeleteProjectDomainAttributes (flyteidl.admin.ProjectDomainAttributesDeleteRequest) returns (flyteidl.admin.ProjectDomainAttributesDeleteResponse) { + option (google.api.http) = { + delete: "/api/v1/project_domain_attributes/{project}/{domain}" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Delete the customized resource attributes associated with a project-domain combination" + }; + } + + // Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level + rpc UpdateProjectAttributes (flyteidl.admin.ProjectAttributesUpdateRequest) returns (flyteidl.admin.ProjectAttributesUpdateResponse) { + option (google.api.http) = { + put: "/api/v1/project_attributes/{attributes.project}" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Update the customized resource attributes associated with a project" + }; + } + + // Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. + rpc GetProjectAttributes (flyteidl.admin.ProjectAttributesGetRequest) returns (flyteidl.admin.ProjectAttributesGetResponse) { + option (google.api.http) = { + get: "/api/v1/project_attributes/{project}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve the customized resource attributes associated with a project" + }; + } + + // Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. 
+ rpc DeleteProjectAttributes (flyteidl.admin.ProjectAttributesDeleteRequest) returns (flyteidl.admin.ProjectAttributesDeleteResponse) { + option (google.api.http) = { + delete: "/api/v1/project_attributes/{project}" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Delete the customized resource attributes associated with a project" + }; + } + // Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. + rpc UpdateWorkflowAttributes (flyteidl.admin.WorkflowAttributesUpdateRequest) returns (flyteidl.admin.WorkflowAttributesUpdateResponse) { + option (google.api.http) = { + put: "/api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Update the customized resource attributes associated with a project, domain and workflow combination" + }; + } + + // Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. + rpc GetWorkflowAttributes (flyteidl.admin.WorkflowAttributesGetRequest) returns (flyteidl.admin.WorkflowAttributesGetResponse) { + option (google.api.http) = { + get: "/api/v1/workflow_attributes/{project}/{domain}/{workflow}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve the customized resource attributes associated with a project, domain and workflow combination" + }; + } + + // Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. 
+ rpc DeleteWorkflowAttributes (flyteidl.admin.WorkflowAttributesDeleteRequest) returns (flyteidl.admin.WorkflowAttributesDeleteResponse) { + option (google.api.http) = { + delete: "/api/v1/workflow_attributes/{project}/{domain}/{workflow}" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Delete the customized resource attributes associated with a project, domain and workflow combination" + }; + } + + // Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. + rpc ListMatchableAttributes (flyteidl.admin.ListMatchableAttributesRequest) returns (flyteidl.admin.ListMatchableAttributesResponse) { + option (google.api.http) = { + get: "/api/v1/matchable_attributes" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve a list of MatchableAttributesConfiguration objects." + }; + } + + // Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. + rpc ListNamedEntities (flyteidl.admin.NamedEntityListRequest) returns (flyteidl.admin.NamedEntityList) { + option (google.api.http) = { + get: "/api/v1/named_entities/{resource_type}/{project}/{domain}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve a list of NamedEntity objects sharing a common resource type, project, and domain." + }; + } + + // Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. + rpc GetNamedEntity (flyteidl.admin.NamedEntityGetRequest) returns (flyteidl.admin.NamedEntity) { + option (google.api.http) = { + get: "/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve a NamedEntity object." + }; + } + + // Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. 
+ rpc UpdateNamedEntity (flyteidl.admin.NamedEntityUpdateRequest) returns (flyteidl.admin.NamedEntityUpdateResponse) { + option (google.api.http) = { + put: "/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Update the fields associated with a NamedEntity" + }; + } + + rpc GetVersion (flyteidl.admin.GetVersionRequest) returns (flyteidl.admin.GetVersionResponse) { + option (google.api.http) = { + get: "/api/v1/version" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve the Version (including the Build information) for FlyteAdmin service" + }; + } + + // Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. + rpc GetDescriptionEntity (flyteidl.admin.ObjectGetRequest) returns (flyteidl.admin.DescriptionEntity) { + option (google.api.http) = { + get: "/api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve an existing description entity description." + }; + } + + // Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. + rpc ListDescriptionEntities (flyteidl.admin.DescriptionEntityListRequest) returns (flyteidl.admin.DescriptionEntityList) { + option (google.api.http) = { + get: "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" + additional_bindings { + get: "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}" + } + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Fetch existing description entity definitions matching input filters." + }; + } + + // Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. 
+ rpc GetExecutionMetrics (flyteidl.admin.WorkflowExecutionGetMetricsRequest) returns (flyteidl.admin.WorkflowExecutionGetMetricsResponse) { + option (google.api.http) = { + get: "/api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve metrics from an existing workflow execution." + }; + }; +} diff --git a/docs/api/flyteidl/flyteidl/service/agent.proto b/docs/api/flyteidl/flyteidl/service/agent.proto new file mode 100644 index 0000000000..cd6b93a972 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/service/agent.proto @@ -0,0 +1,79 @@ +syntax = "proto3"; +package flyteidl.service; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; + +import "google/api/annotations.proto"; +import "flyteidl/admin/agent.proto"; + +// SyncAgentService defines an RPC Service that allows propeller to send the request to the agent server synchronously. +service SyncAgentService { + // ExecuteTaskSync streams the create request and inputs to the agent service and streams the outputs back. + rpc ExecuteTaskSync (stream flyteidl.admin.ExecuteTaskSyncRequest) returns (stream flyteidl.admin.ExecuteTaskSyncResponse){ + option (google.api.http) = { + post: "/api/v1/agent/task/stream" + body: "*" + }; + }; +} + +// AsyncAgentService defines an RPC Service that allows propeller to send the request to the agent server asynchronously. +service AsyncAgentService { + // CreateTask sends a task create request to the agent service. + rpc CreateTask (flyteidl.admin.CreateTaskRequest) returns (flyteidl.admin.CreateTaskResponse){ + option (google.api.http) = { + post: "/api/v1/agent/task" + body: "*" + }; + }; + + // Get job status. 
+ rpc GetTask (flyteidl.admin.GetTaskRequest) returns (flyteidl.admin.GetTaskResponse){ + option (google.api.http) = { + get: "/api/v1/agent/task/{task_category.name}/{task_category.version}/{resource_meta}" + }; + }; + + // Delete the task resource. + rpc DeleteTask (flyteidl.admin.DeleteTaskRequest) returns (flyteidl.admin.DeleteTaskResponse){ + option (google.api.http) = { + delete: "/api/v1/agent/task_executions/{task_category.name}/{task_category.version}/{resource_meta}" + }; + }; + + // GetTaskMetrics returns one or more task execution metrics, if available. + // + // Errors include + // * OutOfRange if metrics are not available for the specified task time range + // * various other errors + rpc GetTaskMetrics(flyteidl.admin.GetTaskMetricsRequest) returns (flyteidl.admin.GetTaskMetricsResponse){ + option (google.api.http) = { + get: "/api/v1/agent/task/metrics/{task_category.name}/{task_category.version}/{resource_meta}" + }; + }; + + // GetTaskLogs returns task execution logs, if available. + rpc GetTaskLogs(flyteidl.admin.GetTaskLogsRequest) returns (stream flyteidl.admin.GetTaskLogsResponse){ + option (google.api.http) = { + get: "/api/v1/agent/task/logs/{task_category.name}/{task_category.version}/{resource_meta}" + }; + }; +} + +// AgentMetadataService defines an RPC service that is also served over HTTP via grpc-gateway. +// This service allows propeller or users to get the metadata of agents. +service AgentMetadataService { + // Fetch a :ref:`ref_flyteidl.admin.Agent` definition. + rpc GetAgent (flyteidl.admin.GetAgentRequest) returns (flyteidl.admin.GetAgentResponse){ + option (google.api.http) = { + get: "/api/v1/agent/{name}" + }; + }; + + // Fetch a list of :ref:`ref_flyteidl.admin.Agent` definitions. 
+ rpc ListAgents (flyteidl.admin.ListAgentsRequest) returns (flyteidl.admin.ListAgentsResponse){ + option (google.api.http) = { + get: "/api/v1/agents" + }; + }; +} diff --git a/docs/api/flyteidl/flyteidl/service/auth.proto b/docs/api/flyteidl/flyteidl/service/auth.proto new file mode 100644 index 0000000000..a340f05add --- /dev/null +++ b/docs/api/flyteidl/flyteidl/service/auth.proto @@ -0,0 +1,94 @@ +syntax = "proto3"; +package flyteidl.service; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; + +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; + +message OAuth2MetadataRequest {} + +// OAuth2MetadataResponse defines an RFC-Compliant response for /.well-known/oauth-authorization-server metadata +// as defined in https://tools.ietf.org/html/rfc8414 +message OAuth2MetadataResponse { + // Defines the issuer string in all JWT tokens this server issues. The issuer can be admin itself or an external + // issuer. + string issuer = 1; + + // URL of the authorization server's authorization endpoint [RFC6749]. This is REQUIRED unless no grant types are + // supported that use the authorization endpoint. + string authorization_endpoint = 2; + + // URL of the authorization server's token endpoint [RFC6749]. + string token_endpoint = 3; + + // Array containing a list of the OAuth 2.0 response_type values that this authorization server supports. + repeated string response_types_supported = 4; + + // JSON array containing a list of the OAuth 2.0 [RFC6749] scope values that this authorization server supports. + repeated string scopes_supported = 5; + + // JSON array containing a list of client authentication methods supported by this token endpoint. + repeated string token_endpoint_auth_methods_supported = 6; + + // URL of the authorization server's JWK Set [JWK] document. 
The referenced document contains the signing key(s) the + // client uses to validate signatures from the authorization server. + string jwks_uri = 7; + + // JSON array containing a list of Proof Key for Code Exchange (PKCE) [RFC7636] code challenge methods supported by + // this authorization server. + repeated string code_challenge_methods_supported = 8; + + // JSON array containing a list of the OAuth 2.0 grant type values that this authorization server supports. + repeated string grant_types_supported = 9; + + // URL of the authorization server's device authorization endpoint, as defined in Section 3.1 of [RFC8628] + string device_authorization_endpoint = 10; +} + +message PublicClientAuthConfigRequest {} + +// FlyteClientResponse encapsulates public information that flyte clients (CLIs... etc.) can use to authenticate users. +message PublicClientAuthConfigResponse { + // client_id to use when initiating OAuth2 authorization requests. + string client_id = 1; + // redirect uri to use when initiating OAuth2 authorization requests. + string redirect_uri = 2; + // scopes to request when initiating OAuth2 authorization requests. + repeated string scopes = 3; + // Authorization Header to use when passing Access Tokens to the server. If not provided, the client should use the + // default http `Authorization` header. + string authorization_metadata_key = 4; + // ServiceHttpEndpoint points to the http endpoint for the backend. If empty, clients can assume the endpoint used + // to configure the gRPC connection can be used for the http one respecting the insecure flag to choose between + // SSL or no SSL connections. + string service_http_endpoint = 5; + // audience to use when initiating OAuth2 authorization requests. + string audience = 6; +} + +// The following defines an RPC service that is also served over HTTP via grpc-gateway. 
+// Standard response codes for both are defined here: https://github.com/grpc-ecosystem/grpc-gateway/blob/master/runtime/errors.go +// RPCs defined in this service must be anonymously accessible. +service AuthMetadataService { + // Anonymously accessible. Retrieves local or external oauth authorization server metadata. + rpc GetOAuth2Metadata (OAuth2MetadataRequest) returns (OAuth2MetadataResponse) { + option (google.api.http) = { + get: "/.well-known/oauth-authorization-server" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieves OAuth2 authorization server metadata. This endpoint is anonymously accessible." + }; + } + + // Anonymously accessible. Retrieves the client information clients should use when initiating OAuth2 authorization + // requests. + rpc GetPublicClientConfig (PublicClientAuthConfigRequest) returns (PublicClientAuthConfigResponse) { + option (google.api.http) = { + get: "/config/v1/flyte_client" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieves public flyte client info. This endpoint is anonymously accessible." + }; + } +} diff --git a/docs/api/flyteidl/flyteidl/service/dataproxy.proto b/docs/api/flyteidl/flyteidl/service/dataproxy.proto new file mode 100644 index 0000000000..86c7c4d977 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/service/dataproxy.proto @@ -0,0 +1,205 @@ +syntax = "proto3"; +package flyteidl.service; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; + +import "google/api/annotations.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; +import "flyteidl/core/identifier.proto"; +import "flyteidl/core/literals.proto"; + + +message CreateUploadLocationResponse { + // SignedUrl specifies the url to use to upload content to (e.g. 
https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...) + string signed_url = 1; + + // NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar) + string native_url = 2; + + // ExpiresAt defines when will the signed URL expires. + google.protobuf.Timestamp expires_at = 3; + + // Data proxy generates these headers for client, and they have to add these headers to the request when uploading the file. + map headers = 4; +} + +// CreateUploadLocationRequest specified request for the CreateUploadLocation API. +// The implementation in data proxy service will create the s3 location with some server side configured prefixes, +// and then: +// - project/domain/(a deterministic str representation of the content_md5)/filename (if present); OR +// - project/domain/filename_root (if present)/filename (if present). +message CreateUploadLocationRequest { + // Project to create the upload location for + // +required + string project = 1; + + // Domain to create the upload location for. + // +required + string domain = 2; + + // Filename specifies a desired suffix for the generated location. E.g. `file.py` or `pre/fix/file.zip`. + // +optional. By default, the service will generate a consistent name based on the provided parameters. + string filename = 3; + + // ExpiresIn defines a requested expiration duration for the generated url. The request will be rejected if this + // exceeds the platform allowed max. + // +optional. The default value comes from a global config. + google.protobuf.Duration expires_in = 4; + + // ContentMD5 restricts the upload location to the specific MD5 provided. The ContentMD5 will also appear in the + // generated path. + // +required + bytes content_md5 = 5; + + // If present, data proxy will use this string in lieu of the md5 hash in the path. When the filename is also included + // this makes the upload location deterministic. 
The native url will still be prefixed by the upload location prefix + // in data proxy config. This option is useful when uploading multiple files. + // +optional + string filename_root = 6; + + // If true, the data proxy will add content_md5 to the metadata to the signed URL and + // it will force clients to add this metadata to the object. + // This make sure dataproxy is backward compatible with the old flytekit. + bool add_content_md5_metadata = 7; + + + // Optional, org key applied to the resource. + string org = 8; +} + +// CreateDownloadLocationRequest specified request for the CreateDownloadLocation API. +message CreateDownloadLocationRequest { + option deprecated = true; + // NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar) + string native_url = 1; + + // ExpiresIn defines a requested expiration duration for the generated url. The request will be rejected if this + // exceeds the platform allowed max. + // +optional. The default value comes from a global config. + google.protobuf.Duration expires_in = 2; +} + +message CreateDownloadLocationResponse { + option deprecated = true; + // SignedUrl specifies the url to use to download content from (e.g. https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...) + string signed_url = 1; + // ExpiresAt defines when will the signed URL expires. + google.protobuf.Timestamp expires_at = 2; +} + +// ArtifactType +enum ArtifactType { + // ARTIFACT_TYPE_UNDEFINED is the default, often invalid, value for the enum. + ARTIFACT_TYPE_UNDEFINED = 0; + + // ARTIFACT_TYPE_DECK refers to the deck html file optionally generated after a task, a workflow or a launch plan + // finishes executing. + ARTIFACT_TYPE_DECK = 1; +} + +// CreateDownloadLinkRequest defines the request parameters to create a download link (signed url) +message CreateDownloadLinkRequest { + // ArtifactType of the artifact requested. 
+ ArtifactType artifact_type = 1; + + // ExpiresIn defines a requested expiration duration for the generated url. The request will be rejected if this + // exceeds the platform allowed max. + // +optional. The default value comes from a global config. + google.protobuf.Duration expires_in = 2; + + oneof source { + // NodeId is the unique identifier for the node execution. For a task node, this will retrieve the output of the + // most recent attempt of the task. + core.NodeExecutionIdentifier node_execution_id = 3; + } +} + +// CreateDownloadLinkResponse defines the response for the generated links +message CreateDownloadLinkResponse { + // SignedUrl specifies the url to use to download content from (e.g. https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...) + repeated string signed_url = 1 [deprecated = true]; + + // ExpiresAt defines when will the signed URL expire. + google.protobuf.Timestamp expires_at = 2 [deprecated = true]; + + // New wrapper object containing the signed urls and expiration time + PreSignedURLs pre_signed_urls = 3; +} + +// Wrapper object since the message is shared across this and the GetDataResponse +message PreSignedURLs { + // SignedUrl specifies the url to use to download content from (e.g. https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...) + repeated string signed_url = 1; + + // ExpiresAt defines when will the signed URL expire. + google.protobuf.Timestamp expires_at = 2; +} + +// General request artifact to retrieve data from a Flyte artifact url. +message GetDataRequest { + // A unique identifier in the form of flyte:// that uniquely, for a given Flyte + // backend, identifies a Flyte artifact ([i]nput, [o]output, flyte [d]eck, etc.). + // e.g. 
flyte://v1/proj/development/execid/n2/0/i (for 0th task execution attempt input) + // flyte://v1/proj/development/execid/n2/i (for node execution input) + // flyte://v1/proj/development/execid/n2/o/o3 (the o3 output of the second node) + string flyte_url = 1; +} + +message GetDataResponse { + oneof data { + // literal map data will be returned + core.LiteralMap literal_map = 1; + + // Flyte deck html will be returned as a signed url users can download + PreSignedURLs pre_signed_urls = 2; + + // Single literal will be returned. This is returned when the user/url requests a specific output or input + // by name. See the o3 example above. + core.Literal literal = 3; + } +} + +// DataProxyService defines an RPC Service that allows access to user-data in a controlled manner. +service DataProxyService { + // CreateUploadLocation creates a signed url to upload artifacts to for a given project/domain. + rpc CreateUploadLocation (CreateUploadLocationRequest) returns (CreateUploadLocationResponse) { + option (google.api.http) = { + post: "/api/v1/dataproxy/artifact_urn" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Creates a write-only http location that is accessible for tasks at runtime." + }; + } + + // CreateDownloadLocation creates a signed url to download artifacts. + rpc CreateDownloadLocation (CreateDownloadLocationRequest) returns (CreateDownloadLocationResponse) { + option deprecated = true; + option (google.api.http) = { + get: "/api/v1/dataproxy/artifact_urn" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Deprecated: Please use CreateDownloadLink instead. Creates a read-only http location that is accessible for tasks at runtime." + }; + } + + // CreateDownloadLocation creates a signed url to download artifacts. 
+ rpc CreateDownloadLink (CreateDownloadLinkRequest) returns (CreateDownloadLinkResponse) { + option (google.api.http) = { + post: "/api/v1/dataproxy/artifact_link" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Creates a read-only http location that is accessible for tasks at runtime." + }; + } + + rpc GetData (GetDataRequest) returns (GetDataResponse) { + // Takes an address like flyte://v1/proj/development/execid/n2/0/i and return the actual data + option (google.api.http) = { + get: "/api/v1/data" + }; + } +} diff --git a/docs/api/flyteidl/flyteidl/service/external_plugin_service.proto b/docs/api/flyteidl/flyteidl/service/external_plugin_service.proto new file mode 100644 index 0000000000..a3035290e2 --- /dev/null +++ b/docs/api/flyteidl/flyteidl/service/external_plugin_service.proto @@ -0,0 +1,79 @@ +syntax = "proto3"; +package flyteidl.service; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; +import "flyteidl/core/literals.proto"; +import "flyteidl/core/tasks.proto"; + +// ExternalPluginService defines an RPC Service that allows propeller to send the request to the backend plugin server. +service ExternalPluginService { + // Send a task create request to the backend plugin server. + rpc CreateTask (TaskCreateRequest) returns (TaskCreateResponse){option deprecated = true;}; + // Get job status. + rpc GetTask (TaskGetRequest) returns (TaskGetResponse){option deprecated = true;}; + // Delete the task resource. + rpc DeleteTask (TaskDeleteRequest) returns (TaskDeleteResponse){option deprecated = true;}; +} + +// The state of the execution is used to control its visibility in the UI/CLI. +enum State { + option deprecated = true; + RETRYABLE_FAILURE = 0; + PERMANENT_FAILURE = 1; + PENDING = 2; + RUNNING = 3; + SUCCEEDED = 4; +} + +// Represents a request structure to create task. 
+message TaskCreateRequest { + option deprecated = true; + // The inputs required to start the execution. All required inputs must be + // included in this map. If not required and not provided, defaults apply. + // +optional + core.LiteralMap inputs = 1; + // Template of the task that encapsulates all the metadata of the task. + core.TaskTemplate template = 2; + // Prefix for where task output data will be written. (e.g. s3://my-bucket/randomstring) + string output_prefix = 3; +} + +// Represents a create response structure. +message TaskCreateResponse { + option deprecated = true; + string job_id = 1; +} + +// A message used to fetch a job state from backend plugin server. +message TaskGetRequest { + option deprecated = true; + // A predefined yet extensible Task type identifier. + string task_type = 1; + // The unique id identifying the job. + string job_id = 2; +} + +// Response to get an individual task state. +message TaskGetResponse { + option deprecated = true; + // The state of the execution is used to control its visibility in the UI/CLI. + State state = 1; + // The outputs of the execution. It's typically used by sql task. Flyteplugins service will create a + // Structured dataset pointing to the query result table. + // +optional + core.LiteralMap outputs = 2; +} + +// A message used to delete a task. +message TaskDeleteRequest { + option deprecated = true; + // A predefined yet extensible Task type identifier. + string task_type = 1; + // The unique id identifying the job. + string job_id = 2; +} + +// Response to delete a task. 
+message TaskDeleteResponse { + option deprecated = true; +} diff --git a/docs/api/flyteidl/flyteidl/service/identity.proto b/docs/api/flyteidl/flyteidl/service/identity.proto new file mode 100644 index 0000000000..244bb9aaeb --- /dev/null +++ b/docs/api/flyteidl/flyteidl/service/identity.proto @@ -0,0 +1,51 @@ +syntax = "proto3"; +package flyteidl.service; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; + +import "google/api/annotations.proto"; +import "google/protobuf/struct.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; + +message UserInfoRequest {} + +// See the OpenID Connect spec at https://openid.net/specs/openid-connect-core-1_0.html#UserInfoResponse for more information. +message UserInfoResponse { + // Locally unique and never reassigned identifier within the Issuer for the End-User, which is intended to be consumed + // by the Client. + string subject = 1; + + // Full name + string name = 2; + + // Shorthand name by which the End-User wishes to be referred to + string preferred_username = 3; + + // Given name(s) or first name(s) + string given_name = 4; + + // Surname(s) or last name(s) + string family_name = 5; + + // Preferred e-mail address + string email = 6; + + // Profile picture URL + string picture = 7; + + // Additional claims + google.protobuf.Struct additional_claims = 8; +} + +// IdentityService defines an RPC Service that interacts with user/app identities. +service IdentityService { + // Retrieves user information about the currently logged in user. + rpc UserInfo (UserInfoRequest) returns (UserInfoResponse) { + option (google.api.http) = { + get: "/me" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieves authenticated identity info." 
+ }; + } +} diff --git a/docs/api/flyteidl/flyteidl/service/signal.proto b/docs/api/flyteidl/flyteidl/service/signal.proto new file mode 100644 index 0000000000..b1b927979b --- /dev/null +++ b/docs/api/flyteidl/flyteidl/service/signal.proto @@ -0,0 +1,55 @@ +syntax = "proto3"; +package flyteidl.service; + +option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; + +import "google/api/annotations.proto"; +import "flyteidl/admin/signal.proto"; +import "protoc-gen-openapiv2/options/annotations.proto"; + +// SignalService defines an RPC Service that may create, update, and retrieve signal(s). +service SignalService { + // Fetches or creates a :ref:`ref_flyteidl.admin.Signal`. + rpc GetOrCreateSignal (flyteidl.admin.SignalGetOrCreateRequest) returns (flyteidl.admin.Signal) { + // Purposefully left out an HTTP API for this RPC call. This is meant to idempotently retrieve + // a signal, meaning the first call will create the signal and all subsequent calls will + // fetch the existing signal. This is only useful during Flyte Workflow execution and therefore + // is not exposed to mitigate unintended behavior. + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Retrieve a signal, creating it if it does not exist." + }; + } + + // Fetch a list of :ref:`ref_flyteidl.admin.Signal` definitions. + rpc ListSignals (flyteidl.admin.SignalListRequest) returns (flyteidl.admin.SignalList) { + option (google.api.http) = { + get: "/api/v1/signals/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Fetch existing signal definitions matching the input signal id filters." 
+ }; + } + + // Sets the value on a :ref:`ref_flyteidl.admin.Signal` definition + rpc SetSignal (flyteidl.admin.SignalSetRequest) returns (flyteidl.admin.SignalSetResponse) { + option (google.api.http) = { + post: "/api/v1/signals" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + description: "Set a signal value." + responses: { + key: "400" + value: { + description: "Returned for bad request that may have failed validation." + } + } + responses: { + key: "409" + value: { + description: "Returned for a request that references an identical entity that has already been registered." + } + } + }; + } +} diff --git a/docs/api/index.md b/docs/api/index.md index 0db0bdad22..0b24d8f157 100644 --- a/docs/api/index.md +++ b/docs/api/index.md @@ -10,13 +10,20 @@ Flytekit is the main Python SDK for building workflows. ::: -:::{grid-item-card} {octicon}`workflow` Flytectl SDK +:::{grid-item-card} {octicon}`workflow` Flytectl CLI :link: flytectl/docs_index :link-type: doc Flytectl is a Golang binary that can be installed on any platform supported by Golang. ::: +:::{grid-item-card} {octicon}`code` FlyteIDL +:link: flyteidl/docs_index +:link-type: doc + +Flyte’s core specification language. 
+::: + :::: ```{toctree} @@ -25,4 +32,5 @@ Flytectl is a Golang binary that can be installed on any platform supported by G flytekit flytectl -``` \ No newline at end of file +flyteidl +``` diff --git a/docs/community/contribute_docs.md b/docs/community/contribute_docs.md index 654204391f..30755edcb8 100644 --- a/docs/community/contribute_docs.md +++ b/docs/community/contribute_docs.md @@ -13,7 +13,7 @@ The Flyte documentation comprises the following types: * **API documentation:** * {doc}`flytekit <../api/flytekit/docs_index>` * {doc}`flytectl <../api/flytectl/docs_index>` - * {doc}`flyteidl <../reference_flyteidl>` + * {doc}`flyteidl <../api/flyteidl/docs_index>` * **{ref}`Tutorials `:** Longer, more advanced guides that use multiple Flyte features to solve real-world problems. Some tutorials may require extra setup, while others can only run on larger clusters. * **{ref}`Integrations examples `:** These examples showcase how to use the Flyte plugins that integrate with the broader data and machine learning ecosystem. * **{ref}`Deployment documentation `:** Guidance on deploying and configuring the Flyte backend. 
diff --git a/docs/conf.py b/docs/conf.py index 867fb3184d..a4cc689cc4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -288,6 +288,16 @@ "flytectl/gen/flytectl_demo_teardown": "../../api/flytectl/gen/flytectl_demo_teardown", "flytectl/gen/flytectl_demo_exec": "../../api/flytectl/gen/flytectl_demo_exec", "flytectl/gen/flytectl_demo_reload": "../../api/flytectl/gen/flytectl_demo_reload", + + # flyteidl + "reference_flyteidl": "../../api/flyteidl/docs_index.html", + "protos/docs/core/core": "../../api/flyteidl/docs/core/core.html", + "protos/docs/admin/admin": "../../api/flyteidl/docs/admin/admin.html", + "protos/docs/service/service": "../../api/flyteidl/docs/service/service.html", + "protos/docs/datacatalog/datacatalog": "../../api/flyteidl/docs/datacatalog/datacatalog.html", + "protos/docs/event/event": "../../api/flyteidl/docs/event/event.html", + "protos/docs/plugins/plugins": "../../api/flyteidl/docs/plugins/plugins.html", + "protos/README": "../../api/flyteidl/contributing.html", } @@ -376,6 +386,9 @@ html_theme_options = { # custom flyteorg pydata theme options "github_url": "https://github.com/flyteorg/flyte", + "logo": { + "text": "Flyte Docs", + }, "icon_links": [ { "name": "GitHub", @@ -529,7 +542,7 @@ # Pattern for replacing all ref/doc labels that point to protos/docs with /protos/docs PROTO_REF_PATTERN = r"([:<])(protos/docs)" -PROTO_REF_REPLACE = r"\1/protos/docs" +PROTO_REF_REPLACE = r"\1/api/flyteidl/docs" # These patterns are used to replace values in source files that are imported # from other repos. 
@@ -549,7 +562,7 @@ r"<_tags/DistributedComputing>": r"", r"{ref}`bioinformatics `": r"bioinformatics", PROTO_REF_PATTERN: PROTO_REF_REPLACE, - r"/protos/docs/service/index": r"/protos/docs/service/service" + r"/protos/docs/service/index": r"/api/flyteidl/docs/service/service" } # r"": r"", @@ -626,8 +639,8 @@ "name": "flyteidl", "source": "../flyteidl", "docs_path": "protos", - "dest": "protos", # to stay compatible with flyteidl docs path naming - "cmd": ["cp", "../flyteidl/README.md", "protos/README.md"], + "dest": "api/flyteidl", + # "cmd": ["cp", "../flyteidl/README.md", "api/flyteidl/docs/contributing.md"], "local": True, } ] diff --git a/docs/index.md b/docs/index.md index a80517ce20..61bb687131 100644 --- a/docs/index.md +++ b/docs/index.md @@ -102,7 +102,7 @@ Below are the API reference to the different components of Flyte: - Flyte's official Python SDK. * - {doc}`FlyteCTL ` - Flyte's command-line interface for interacting with a Flyte cluster. -* - {doc}`FlyteIDL ` +* - {doc}`FlyteIDL ` - Flyte's core specification language. ``` @@ -135,6 +135,5 @@ flytesnacks/tutorials/index flytesnacks/integrations/index deployment/index api/index -reference_flyteidl community/index ``` diff --git a/docs/reference_flyteidl.md b/docs/reference_flyteidl.md deleted file mode 100644 index bfe9924c9e..0000000000 --- a/docs/reference_flyteidl.md +++ /dev/null @@ -1,18 +0,0 @@ -# FlyteIDL API reference - -The protocol buffers defined here provide a high level specification of various -entities in Flyte control plane and data plane. It provides detailed definitions -and documentation of all these entities. 
- -```{toctree} -:maxdepth: 2 -:name: flyteidltoc - -Flyte Core Language Specification -FlyteAdmin Service -FlyteAdmin REST and gRPC Interface -Data Catalog Service -Internal and External Eventing Interface -Flyte Task Plugin Specification -Contributing Guide -``` diff --git a/flyteidl/README.md b/flyteidl/README.md index 67685f45b7..25a5badebc 100644 --- a/flyteidl/README.md +++ b/flyteidl/README.md @@ -1,6 +1,6 @@ # Flyteidl -This is one of the core repositories of Flyte. It contains the Specification of the Flyte Language using protobuf messages, the Backend API specification in gRPC, and Swagger REST. The repo contains the generated clients and protocol message structures in multiple languages. Along with the generated code, the repository also contains the Golang clients for Flyte's backend APIs (the services grouped under FlyteAdmin). +This is one of the core components of Flyte. It contains the Specification of the Flyte Language using protobuf messages, the Backend API specification in gRPC, and Swagger REST. The repo contains the generated clients and protocol message structures in multiple languages. Along with the generated code, the repository also contains the Golang clients for Flyte's backend APIs (the services grouped under FlyteAdmin). [![Slack](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://slack.flyte.org) @@ -11,69 +11,4 @@ This is one of the core repositories of Flyte. It contains the Specification of ## Contributing to Flyteidl -## Tooling for Flyteidl - -1. Run ``make download_tooling`` to install generator dependencies. - -```bash - make download_tooling -``` - -2. Ensure Docker is installed locally. -3. Run ``make generate`` to generate all the code, mock client, and docs for FlyteAdmin Service. - -```bash - make generate -``` - -4. 
To add new dependencies for documentation generation, modify ``doc-requirements.in`` and run - -```bash - make doc-requirements.txt -``` - -## Docs structure - -The index.rst files for protos are arranged in parallel under the ``docs`` folder. -All the proto definitions are within ``protos/flyteidl`` and their corresponding docs are in ``protos/docs``. - -``` -docs -├── admin -│   ├── admin.rst -│   └── index.rst -├── core -│   ├── core.rst -│   └── index.rst -├── datacatalog -│   ├── datacatalog.rst -│   └── index.rst -├── event -│   ├── event.rst -│   └── index.rst -├── plugins -│   ├── index.rst -│   └── plugins.rst -├── service -│   ├── index.rst -│   └── service.rst -``` - -Each module in protos has a module in docs with the same name. -For example: ``protos/flyteidl/core`` has a module ``protos/docs/core`` under the ``docs`` folder which has the corresponding index and documentation files. - - -## Generating Documentation - -* If a new module is to be introduced, follow the structure for core files in `generate_protos.sh` file which helps generate the core documentation from its proto files. -``` - core_proto_files=`ls protos/flyteidl/core/*.proto |xargs` - # Remove any currently generated file - ls -d protos/docs/core/* | grep -v index.rst | xargs rm - protoc --doc_out=protos/docs/core --doc_opt=restructuredtext,core.rst -I=protos `echo $core_proto_files` -``` - -* ``make generate`` generates the modified rst files. - -* ``make html`` generates the Sphinx documentation from the docs folder that uses the modified rst files. - +See the [contributing docs](protos/contributing.md) for more information. diff --git a/flyteidl/protos/contributing.md b/flyteidl/protos/contributing.md new file mode 100644 index 0000000000..825ad7eeb6 --- /dev/null +++ b/flyteidl/protos/contributing.md @@ -0,0 +1,68 @@ +## Contributing to FlyteIDL + +## Install Tooling + +1. Run ``make download_tooling`` to install generator dependencies. + +```bash + make download_tooling +``` + +2. 
Ensure Docker is installed locally. +3. Run ``make generate`` to generate all the code, mock client, and docs for FlyteAdmin Service. + +```bash + make generate +``` + +4. To add new dependencies for documentation generation, modify ``doc-requirements.in`` and run + +```bash + make doc-requirements.txt +``` + +## Docs structure + +The index.rst files for protos are arranged in parallel under the ``docs`` folder. +All the proto definitions are within ``protos/flyteidl`` and their corresponding docs are in ``protos/docs``. + +``` +docs +├── admin +│   ├── admin.rst +│   └── index.rst +├── core +│   ├── core.rst +│   └── index.rst +├── datacatalog +│   ├── datacatalog.rst +│   └── index.rst +├── event +│   ├── event.rst +│   └── index.rst +├── plugins +│   ├── index.rst +│   └── plugins.rst +├── service +│   ├── index.rst +│   └── service.rst +``` + +Each module in protos has a module in docs with the same name. +For example: ``protos/flyteidl/core`` has a module ``protos/docs/core`` under the ``docs`` folder which has the corresponding index and documentation files. + + +## Generating Documentation + +* If a new module is to be introduced, follow the structure for core files in `generate_protos.sh` file which helps generate the core documentation from its proto files. +``` + core_proto_files=`ls protos/flyteidl/core/*.proto |xargs` + # Remove any currently generated file + ls -d protos/docs/core/* | grep -v index.rst | xargs rm + protoc --doc_out=protos/docs/core --doc_opt=restructuredtext,core.rst -I=protos `echo $core_proto_files` +``` + +* ``make generate`` generates the modified rst files. + +* ``make html`` generates the Sphinx documentation from the docs folder that uses the modified rst files. 
+ diff --git a/flyteidl/protos/docs_index.rst b/flyteidl/protos/docs_index.rst new file mode 100644 index 0000000000..27383673de --- /dev/null +++ b/flyteidl/protos/docs_index.rst @@ -0,0 +1,19 @@ +Flyte Language and API specification +==================================== + +The protocol buffers defined here provide a high level specification of various +entities in Flyte control plane and data plane. It provides detailed definition +and documentation of all these entities. + +.. toctree:: + :maxdepth: 1 + :caption: flyteidl + :name: flyteidltoc + + docs/admin/index + docs/core/index + docs/datacatalog/index + docs/event/index + docs/plugins/index + docs/service/index + docs/contributing From 0b7bb72abb15ccd9e7c94c7e97aabfde912b7a44 Mon Sep 17 00:00:00 2001 From: Niels Bantilan Date: Thu, 3 Oct 2024 12:22:51 -0400 Subject: [PATCH 2/5] do not check in flyteidl docs Signed-off-by: Niels Bantilan --- .gitignore | 1 + docs/api/flyteidl/buf.lock | 18 - docs/api/flyteidl/buf.yaml | 12 - docs/api/flyteidl/docs/admin/admin.rst | 4623 ----------------- docs/api/flyteidl/docs/admin/index.rst | 13 - docs/api/flyteidl/docs/contributing.md | 79 - docs/api/flyteidl/docs/core/core.rst | 3952 -------------- docs/api/flyteidl/docs/core/index.rst | 15 - .../flyteidl/docs/datacatalog/datacatalog.rst | 1313 ----- docs/api/flyteidl/docs/datacatalog/index.rst | 16 - docs/api/flyteidl/docs/event/event.rst | 726 --- docs/api/flyteidl/docs/event/index.rst | 27 - docs/api/flyteidl/docs/plugins/index.rst | 14 - docs/api/flyteidl/docs/plugins/plugins.rst | 780 --- docs/api/flyteidl/docs/restructuredtext.tmpl | 129 - docs/api/flyteidl/docs/service/index.rst | 13 - docs/api/flyteidl/docs/service/service.rst | 543 -- .../docs/withoutscalar_restructuredtext.tmpl | 105 - docs/api/flyteidl/docs_index.rst | 19 - docs/api/flyteidl/flyteidl/admin/agent.proto | 258 - .../flyteidl/admin/cluster_assignment.proto | 11 - docs/api/flyteidl/flyteidl/admin/common.proto | 327 -- 
.../flyteidl/admin/description_entity.proto | 95 - docs/api/flyteidl/flyteidl/admin/event.proto | 70 - .../flyteidl/flyteidl/admin/execution.proto | 428 -- .../flyteidl/flyteidl/admin/launch_plan.proto | 226 - .../flyteidl/admin/matchable_resource.proto | 194 - .../flyteidl/admin/node_execution.proto | 245 - .../flyteidl/admin/notification.proto | 27 - .../api/flyteidl/flyteidl/admin/project.proto | 132 - .../flyteidl/admin/project_attributes.proto | 69 - .../admin/project_domain_attributes.proto | 80 - .../flyteidl/flyteidl/admin/schedule.proto | 43 - docs/api/flyteidl/flyteidl/admin/signal.proto | 86 - docs/api/flyteidl/flyteidl/admin/task.proto | 71 - .../flyteidl/admin/task_execution.proto | 168 - .../api/flyteidl/flyteidl/admin/version.proto | 27 - .../flyteidl/flyteidl/admin/workflow.proto | 92 - .../flyteidl/admin/workflow_attributes.proto | 89 - .../flyteidl/cacheservice/cacheservice.proto | 143 - .../flyteidl/flyteidl/core/artifact_id.proto | 112 - docs/api/flyteidl/flyteidl/core/catalog.proto | 63 - .../api/flyteidl/flyteidl/core/compiler.proto | 64 - .../flyteidl/flyteidl/core/condition.proto | 63 - .../flyteidl/flyteidl/core/dynamic_job.proto | 32 - docs/api/flyteidl/flyteidl/core/errors.proto | 35 - .../flyteidl/flyteidl/core/execution.proto | 118 - .../flyteidl/core/execution_envs.proto | 45 - .../flyteidl/flyteidl/core/identifier.proto | 80 - .../flyteidl/flyteidl/core/interface.proto | 64 - .../api/flyteidl/flyteidl/core/literals.proto | 200 - docs/api/flyteidl/flyteidl/core/metrics.proto | 50 - .../api/flyteidl/flyteidl/core/security.proto | 130 - docs/api/flyteidl/flyteidl/core/tasks.proto | 351 -- docs/api/flyteidl/flyteidl/core/types.proto | 208 - .../api/flyteidl/flyteidl/core/workflow.proto | 331 -- .../flyteidl/core/workflow_closure.proto | 18 - .../flyteidl/datacatalog/datacatalog.proto | 420 -- .../flyteidl/flyteidl/event/cloudevents.proto | 73 - docs/api/flyteidl/flyteidl/event/event.proto | 328 -- 
.../flyteidl/flyteidl/plugins/array_job.proto | 30 - .../flyteidl/flyteidl/plugins/common.proto | 27 - docs/api/flyteidl/flyteidl/plugins/dask.proto | 41 - .../flyteidl/plugins/kubeflow/common.proto | 28 - .../flyteidl/plugins/kubeflow/mpi.proto | 47 - .../flyteidl/plugins/kubeflow/pytorch.proto | 53 - .../plugins/kubeflow/tensorflow.proto | 46 - docs/api/flyteidl/flyteidl/plugins/mpi.proto | 20 - .../flyteidl/flyteidl/plugins/presto.proto | 14 - .../flyteidl/flyteidl/plugins/pytorch.proto | 25 - .../flyteidl/flyteidl/plugins/qubole.proto | 26 - docs/api/flyteidl/flyteidl/plugins/ray.proto | 53 - .../api/flyteidl/flyteidl/plugins/spark.proto | 34 - .../flyteidl/plugins/tensorflow.proto | 18 - .../flyteidl/flyteidl/plugins/waitable.proto | 15 - .../api/flyteidl/flyteidl/service/admin.proto | 668 --- .../api/flyteidl/flyteidl/service/agent.proto | 79 - docs/api/flyteidl/flyteidl/service/auth.proto | 94 - .../flyteidl/flyteidl/service/dataproxy.proto | 205 - .../service/external_plugin_service.proto | 79 - .../flyteidl/flyteidl/service/identity.proto | 51 - .../flyteidl/flyteidl/service/signal.proto | 55 - 82 files changed, 1 insertion(+), 19771 deletions(-) delete mode 100644 docs/api/flyteidl/buf.lock delete mode 100644 docs/api/flyteidl/buf.yaml delete mode 100644 docs/api/flyteidl/docs/admin/admin.rst delete mode 100644 docs/api/flyteidl/docs/admin/index.rst delete mode 100644 docs/api/flyteidl/docs/contributing.md delete mode 100644 docs/api/flyteidl/docs/core/core.rst delete mode 100644 docs/api/flyteidl/docs/core/index.rst delete mode 100644 docs/api/flyteidl/docs/datacatalog/datacatalog.rst delete mode 100644 docs/api/flyteidl/docs/datacatalog/index.rst delete mode 100644 docs/api/flyteidl/docs/event/event.rst delete mode 100644 docs/api/flyteidl/docs/event/index.rst delete mode 100644 docs/api/flyteidl/docs/plugins/index.rst delete mode 100644 docs/api/flyteidl/docs/plugins/plugins.rst delete mode 100644 docs/api/flyteidl/docs/restructuredtext.tmpl delete 
mode 100644 docs/api/flyteidl/docs/service/index.rst delete mode 100644 docs/api/flyteidl/docs/service/service.rst delete mode 100644 docs/api/flyteidl/docs/withoutscalar_restructuredtext.tmpl delete mode 100644 docs/api/flyteidl/docs_index.rst delete mode 100644 docs/api/flyteidl/flyteidl/admin/agent.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/cluster_assignment.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/common.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/description_entity.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/event.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/execution.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/launch_plan.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/matchable_resource.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/node_execution.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/notification.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/project.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/project_attributes.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/project_domain_attributes.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/schedule.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/signal.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/task.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/task_execution.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/version.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/workflow.proto delete mode 100644 docs/api/flyteidl/flyteidl/admin/workflow_attributes.proto delete mode 100644 docs/api/flyteidl/flyteidl/cacheservice/cacheservice.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/artifact_id.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/catalog.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/compiler.proto delete mode 100644 
docs/api/flyteidl/flyteidl/core/condition.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/dynamic_job.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/errors.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/execution.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/execution_envs.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/identifier.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/interface.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/literals.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/metrics.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/security.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/tasks.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/types.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/workflow.proto delete mode 100644 docs/api/flyteidl/flyteidl/core/workflow_closure.proto delete mode 100644 docs/api/flyteidl/flyteidl/datacatalog/datacatalog.proto delete mode 100644 docs/api/flyteidl/flyteidl/event/cloudevents.proto delete mode 100644 docs/api/flyteidl/flyteidl/event/event.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/array_job.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/common.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/dask.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/kubeflow/common.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/kubeflow/mpi.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/kubeflow/pytorch.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/kubeflow/tensorflow.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/mpi.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/presto.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/pytorch.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/qubole.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/ray.proto delete mode 100644 
docs/api/flyteidl/flyteidl/plugins/spark.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/tensorflow.proto delete mode 100644 docs/api/flyteidl/flyteidl/plugins/waitable.proto delete mode 100644 docs/api/flyteidl/flyteidl/service/admin.proto delete mode 100644 docs/api/flyteidl/flyteidl/service/agent.proto delete mode 100644 docs/api/flyteidl/flyteidl/service/auth.proto delete mode 100644 docs/api/flyteidl/flyteidl/service/dataproxy.proto delete mode 100644 docs/api/flyteidl/flyteidl/service/external_plugin_service.proto delete mode 100644 docs/api/flyteidl/flyteidl/service/identity.proto delete mode 100644 docs/api/flyteidl/flyteidl/service/signal.proto diff --git a/.gitignore b/.gitignore index 35c73f38c9..ecb45d1558 100644 --- a/.gitignore +++ b/.gitignore @@ -33,6 +33,7 @@ docs/_tags/ docs/api/flytectl docs/protos docs/api/flytekit +docs/api/flyteidl docs/flytesnacks docs/examples docs/_src diff --git a/docs/api/flyteidl/buf.lock b/docs/api/flyteidl/buf.lock deleted file mode 100644 index 0e0addc9f2..0000000000 --- a/docs/api/flyteidl/buf.lock +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by buf. DO NOT EDIT. 
-version: v1 -deps: - - remote: buf.build - owner: googleapis - repository: googleapis - commit: 62f35d8aed1149c291d606d958a7ce32 - digest: shake256:c5f5c2401cf70b7c9719834954f31000a978397fdfebda861419bb4ab90fa8efae92710fddab0820533908a1e25ed692a8e119432b7b260c895087a4975b32f3 - - remote: buf.build - owner: grpc-ecosystem - repository: grpc-gateway - commit: 3f42134f4c564983838425bc43c7a65f - digest: shake256:3d11d4c0fe5e05fda0131afefbce233940e27f0c31c5d4e385686aea58ccd30f72053f61af432fa83f1fc11cda57f5f18ca3da26a29064f73c5a0d076bba8d92 - - remote: buf.build - owner: unionai - repository: protoc-gen-swagger - commit: fd9d94dc48154d5c94ccc43695df150f - digest: shake256:57743c99f8173b432f0750eac13671fe7721a824cbf5d4fbd85ffdd0d7b45ded507f7b0a49020f9a5eb2a434e9009ad9480140b4c9173ff58bd85c4685197d5b diff --git a/docs/api/flyteidl/buf.yaml b/docs/api/flyteidl/buf.yaml deleted file mode 100644 index 420796f854..0000000000 --- a/docs/api/flyteidl/buf.yaml +++ /dev/null @@ -1,12 +0,0 @@ -version: v1 -name: buf.build/flyteorg/flyteidl -lint: - use: - - DEFAULT -breaking: - use: - - FILE -deps: - - buf.build/googleapis/googleapis:62f35d8aed1149c291d606d958a7ce32 - - buf.build/unionai/protoc-gen-swagger - - buf.build/grpc-ecosystem/grpc-gateway diff --git a/docs/api/flyteidl/docs/admin/admin.rst b/docs/api/flyteidl/docs/admin/admin.rst deleted file mode 100644 index 832f656c16..0000000000 --- a/docs/api/flyteidl/docs/admin/admin.rst +++ /dev/null @@ -1,4623 +0,0 @@ -###################### -Protocol Documentation -###################### - - - - -.. _ref_flyteidl/admin/cluster_assignment.proto: - -flyteidl/admin/cluster_assignment.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.ClusterAssignment: - -ClusterAssignment ------------------------------------------------------------------- - -Encapsulates specifications for routing an execution onto a specific cluster. - - - -.. 
csv-table:: ClusterAssignment type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cluster_pool_name", ":ref:`ref_string`", "", "" - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/common.proto: - -flyteidl/admin/common.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.Annotations: - -Annotations ------------------------------------------------------------------- - -Annotation values to be applied to an execution resource. -In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined -to specify how to merge annotations defined at registration and execution time. - - - -.. csv-table:: Annotations type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_flyteidl.admin.Annotations.ValuesEntry`", "repeated", "Map of custom annotations to be applied to the execution resource." - - - - - - - -.. _ref_flyteidl.admin.Annotations.ValuesEntry: - -Annotations.ValuesEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: Annotations.ValuesEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.admin.AuthRole: - -AuthRole ------------------------------------------------------------------- - -Defines permissions associated with executions created by this launch plan spec. -Use either of these roles when they have permissions required by your workflow execution. -Deprecated. - - - -.. csv-table:: AuthRole type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "assumable_iam_role", ":ref:`ref_string`", "", "Defines an optional iam role which will be used for tasks run in executions created with this launch plan." 
- "kubernetes_service_account", ":ref:`ref_string`", "", "Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan." - - - - - - - -.. _ref_flyteidl.admin.EmailNotification: - -EmailNotification ------------------------------------------------------------------- - -Defines an email notification specification. - - - -.. csv-table:: EmailNotification type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "recipients_email", ":ref:`ref_string`", "repeated", "The list of email addresses recipients for this notification. +required" - - - - - - - -.. _ref_flyteidl.admin.Labels: - -Labels ------------------------------------------------------------------- - -Label values to be applied to an execution resource. -In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined -to specify how to merge labels defined at registration and execution time. - - - -.. csv-table:: Labels type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_flyteidl.admin.Labels.ValuesEntry`", "repeated", "Map of custom labels to be applied to the execution resource." - - - - - - - -.. _ref_flyteidl.admin.Labels.ValuesEntry: - -Labels.ValuesEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: Labels.ValuesEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.admin.NamedEntity: - -NamedEntity ------------------------------------------------------------------- - -Encapsulates information common to a NamedEntity, a Flyte resource such as a task, -workflow or launch plan. A NamedEntity is exclusively identified by its resource type -and identifier. - - - -.. 
csv-table:: NamedEntity type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the named entity. One of Task, Workflow or LaunchPlan." - "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "" - "metadata", ":ref:`ref_flyteidl.admin.NamedEntityMetadata`", "", "Additional metadata around a named entity." - - - - - - - -.. _ref_flyteidl.admin.NamedEntityGetRequest: - -NamedEntityGetRequest ------------------------------------------------------------------- - -A request to retrieve the metadata associated with a NamedEntityIdentifier - - - -.. csv-table:: NamedEntityGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. +required" - "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "The identifier for the named entity for which to fetch metadata. +required" - - - - - - - -.. _ref_flyteidl.admin.NamedEntityIdentifier: - -NamedEntityIdentifier ------------------------------------------------------------------- - -Encapsulation of fields that identifies a Flyte resource. -A Flyte resource can be a task, workflow or launch plan. -A resource can internally have multiple versions and is uniquely identified -by project, domain, and name. - - - -.. csv-table:: NamedEntityIdentifier type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Name of the project the resource belongs to." - "domain", ":ref:`ref_string`", "", "Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project." - "name", ":ref:`ref_string`", "", "User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. 
+optional - in certain contexts - like 'List API', 'Launch plans'" - - - - - - - -.. _ref_flyteidl.admin.NamedEntityIdentifierList: - -NamedEntityIdentifierList ------------------------------------------------------------------- - -Represents a list of NamedEntityIdentifiers. - - - -.. csv-table:: NamedEntityIdentifierList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "entities", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "repeated", "A list of identifiers." - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.NamedEntityIdentifierListRequest: - -NamedEntityIdentifierListRequest ------------------------------------------------------------------- - -Represents a request structure to list NamedEntityIdentifiers. - - - -.. csv-table:: NamedEntityIdentifierListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Name of the project that contains the identifiers. +required" - "domain", ":ref:`ref_string`", "", "Name of the domain the identifiers belongs to within the project. +required" - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Specifies how listed entities should be sorted in the response. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. +optional" - - - - - - - -.. _ref_flyteidl.admin.NamedEntityList: - -NamedEntityList ------------------------------------------------------------------- - -Represents a list of NamedEntityIdentifiers. - - - -.. 
csv-table:: NamedEntityList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "entities", ":ref:`ref_flyteidl.admin.NamedEntity`", "repeated", "A list of NamedEntity objects" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.NamedEntityListRequest: - -NamedEntityListRequest ------------------------------------------------------------------- - -Represents a request structure to list NamedEntity objects - - - -.. csv-table:: NamedEntityListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. +required" - "project", ":ref:`ref_string`", "", "Name of the project that contains the identifiers. +required" - "domain", ":ref:`ref_string`", "", "Name of the domain the identifiers belongs to within the project." - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned." - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Specifies how listed entities should be sorted in the response. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. +optional" - - - - - - - -.. _ref_flyteidl.admin.NamedEntityMetadata: - -NamedEntityMetadata ------------------------------------------------------------------- - -Additional metadata around a named entity. - - - -.. 
csv-table:: NamedEntityMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "description", ":ref:`ref_string`", "", "Common description across all versions of the entity +optional" - "state", ":ref:`ref_flyteidl.admin.NamedEntityState`", "", "Shared state across all version of the entity At this point in time, only workflow entities can have their state archived." - - - - - - - -.. _ref_flyteidl.admin.NamedEntityUpdateRequest: - -NamedEntityUpdateRequest ------------------------------------------------------------------- - -Request to set the referenced named entity state to the configured value. - - - -.. csv-table:: NamedEntityUpdateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the metadata to update +required" - "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "Identifier of the metadata to update +required" - "metadata", ":ref:`ref_flyteidl.admin.NamedEntityMetadata`", "", "Metadata object to set as the new value +required" - - - - - - - -.. _ref_flyteidl.admin.NamedEntityUpdateResponse: - -NamedEntityUpdateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.Notification: - -Notification ------------------------------------------------------------------- - -Represents a structure for notifications based on execution status. -The notification content is configured within flyte admin but can be templatized. -Future iterations could expose configuring notifications with custom content. - - - -.. csv-table:: Notification type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "phases", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "repeated", "A list of phases to which users can associate the notifications to. 
+required" - "email", ":ref:`ref_flyteidl.admin.EmailNotification`", "", "" - "pager_duty", ":ref:`ref_flyteidl.admin.PagerDutyNotification`", "", "" - "slack", ":ref:`ref_flyteidl.admin.SlackNotification`", "", "" - - - - - - - -.. _ref_flyteidl.admin.ObjectGetRequest: - -ObjectGetRequest ------------------------------------------------------------------- - -Shared request structure to fetch a single resource. -Resources include: Task, Workflow, LaunchPlan - - - -.. csv-table:: ObjectGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Indicates a unique version of resource. +required" - - - - - - - -.. _ref_flyteidl.admin.PagerDutyNotification: - -PagerDutyNotification ------------------------------------------------------------------- - -Defines a pager duty notification specification. - - - -.. csv-table:: PagerDutyNotification type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "recipients_email", ":ref:`ref_string`", "repeated", "Currently, PagerDuty notifications leverage email to trigger a notification. +required" - - - - - - - -.. _ref_flyteidl.admin.RawOutputDataConfig: - -RawOutputDataConfig ------------------------------------------------------------------- - -Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.). -See https://github.com/flyteorg/flyte/issues/211 for more background information. - - - -.. csv-table:: RawOutputDataConfig type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "output_location_prefix", ":ref:`ref_string`", "", "Prefix for where offloaded data from user workflows will be written e.g. s3://bucket/key or s3://bucket/" - - - - - - - -.. _ref_flyteidl.admin.ResourceListRequest: - -ResourceListRequest ------------------------------------------------------------------- - -Shared request structure to retrieve a list of resources. 
-Resources include: Task, Workflow, LaunchPlan - - - -.. csv-table:: ResourceListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "id represents the unique identifier of the resource. +required" - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" - - - - - - - -.. _ref_flyteidl.admin.SlackNotification: - -SlackNotification ------------------------------------------------------------------- - -Defines a slack notification specification. - - - -.. csv-table:: SlackNotification type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "recipients_email", ":ref:`ref_string`", "repeated", "Currently, Slack notifications leverage email to trigger a notification. +required" - - - - - - - -.. _ref_flyteidl.admin.Sort: - -Sort ------------------------------------------------------------------- - -Specifies sort ordering in a list request. - - - -.. csv-table:: Sort type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "Indicates an attribute to sort the response values. +required" - "direction", ":ref:`ref_flyteidl.admin.Sort.Direction`", "", "Indicates the direction to apply sort key for response values. +optional" - - - - - - - -.. _ref_flyteidl.admin.UrlBlob: - -UrlBlob ------------------------------------------------------------------- - -Represents a string url and associated metadata used throughout the platform. - - - -.. 
csv-table:: UrlBlob type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "url", ":ref:`ref_string`", "", "Actual url value." - "bytes", ":ref:`ref_int64`", "", "Represents the size of the file accessible at the above url." - - - - - - -.. - end messages - - - -.. _ref_flyteidl.admin.NamedEntityState: - -NamedEntityState ------------------------------------------------------------------- - -The status of the named entity is used to control its visibility in the UI. - -.. csv-table:: Enum NamedEntityState values - :header: "Name", "Number", "Description" - :widths: auto - - "NAMED_ENTITY_ACTIVE", "0", "By default, all named entities are considered active and under development." - "NAMED_ENTITY_ARCHIVED", "1", "Archived named entities are no longer visible in the UI." - "SYSTEM_GENERATED", "2", "System generated entities that aren't explicitly created or managed by a user." - - - -.. _ref_flyteidl.admin.Sort.Direction: - -Sort.Direction ------------------------------------------------------------------- - - - -.. csv-table:: Enum Sort.Direction values - :header: "Name", "Number", "Description" - :widths: auto - - "DESCENDING", "0", "By default, fields are sorted in descending order." - "ASCENDING", "1", "" - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/description_entity.proto: - -flyteidl/admin/description_entity.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.Description: - -Description ------------------------------------------------------------------- - -Full user description with formatting preserved. This can be rendered -by clients, such as the console or command line tools with in-tact -formatting. - - - -.. 
csv-table:: Description type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_string`", "", "long description - no more than 4KB" - "uri", ":ref:`ref_string`", "", "if the description sizes exceed some threshold we can offload the entire description proto altogether to an external data store, like S3 rather than store inline in the db" - "format", ":ref:`ref_flyteidl.admin.DescriptionFormat`", "", "Format of the long description" - "icon_link", ":ref:`ref_string`", "", "Optional link to an icon for the entity" - - - - - - - -.. _ref_flyteidl.admin.DescriptionEntity: - -DescriptionEntity ------------------------------------------------------------------- - -DescriptionEntity contains detailed description for the task/workflow. -Documentation could provide insight into the algorithms, business use case, etc. - - - -.. csv-table:: DescriptionEntity type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the description entity." - "short_description", ":ref:`ref_string`", "", "One-liner overview of the entity." - "long_description", ":ref:`ref_flyteidl.admin.Description`", "", "Full user description with formatting preserved." - "source_code", ":ref:`ref_flyteidl.admin.SourceCode`", "", "Optional link to source code used to define this entity." - "tags", ":ref:`ref_string`", "repeated", "User-specified tags. These are arbitrary and can be used for searching filtering and discovering tasks." - - - - - - - -.. _ref_flyteidl.admin.DescriptionEntityList: - -DescriptionEntityList ------------------------------------------------------------------- - -Represents a list of DescriptionEntities returned from the admin. -See :ref:`ref_flyteidl.admin.DescriptionEntity` for more details - - - -.. 
csv-table:: DescriptionEntityList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "descriptionEntities", ":ref:`ref_flyteidl.admin.DescriptionEntity`", "repeated", "A list of DescriptionEntities returned based on the request." - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.DescriptionEntityListRequest: - -DescriptionEntityListRequest ------------------------------------------------------------------- - -Represents a request structure to retrieve a list of DescriptionEntities. -See :ref:`ref_flyteidl.admin.DescriptionEntity` for more details - - - -.. csv-table:: DescriptionEntityListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Identifies the specific type of resource that this identifier corresponds to." - "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "The identifier for the description entity. +required" - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering for returned list. +optional" - - - - - - - -.. _ref_flyteidl.admin.SourceCode: - -SourceCode ------------------------------------------------------------------- - -Link to source code used to define this entity - - - -.. 
csv-table:: SourceCode type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "link", ":ref:`ref_string`", "", "" - - - - - - -.. - end messages - - - -.. _ref_flyteidl.admin.DescriptionFormat: - -DescriptionFormat ------------------------------------------------------------------- - -The format of the long description - -.. csv-table:: Enum DescriptionFormat values - :header: "Name", "Number", "Description" - :widths: auto - - "DESCRIPTION_FORMAT_UNKNOWN", "0", "" - "DESCRIPTION_FORMAT_MARKDOWN", "1", "" - "DESCRIPTION_FORMAT_HTML", "2", "" - "DESCRIPTION_FORMAT_RST", "3", "python default documentation - comments is rst" - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/event.proto: - -flyteidl/admin/event.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.EventErrorAlreadyInTerminalState: - -EventErrorAlreadyInTerminalState ------------------------------------------------------------------- - -Indicates that a sent event was not used to update execution state due to -the referenced execution already being terminated (and therefore ineligible -for further state transitions). - - - -.. csv-table:: EventErrorAlreadyInTerminalState type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "current_phase", ":ref:`ref_string`", "", "+required" - - - - - - - -.. _ref_flyteidl.admin.EventErrorIncompatibleCluster: - -EventErrorIncompatibleCluster ------------------------------------------------------------------- - -Indicates an event was rejected because it came from a different cluster than -is on record as running the execution. - - - -.. csv-table:: EventErrorIncompatibleCluster type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cluster", ":ref:`ref_string`", "", "The cluster which has been recorded as processing the execution. +required" - - - - - - - -.. 
_ref_flyteidl.admin.EventFailureReason: - -EventFailureReason ------------------------------------------------------------------- - -Indicates why a sent event was not used to update execution. - - - -.. csv-table:: EventFailureReason type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "already_in_terminal_state", ":ref:`ref_flyteidl.admin.EventErrorAlreadyInTerminalState`", "", "" - "incompatible_cluster", ":ref:`ref_flyteidl.admin.EventErrorIncompatibleCluster`", "", "" - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionEventRequest: - -NodeExecutionEventRequest ------------------------------------------------------------------- - -Request to send a notification that a node execution event has occurred. - - - -.. csv-table:: NodeExecutionEventRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "request_id", ":ref:`ref_string`", "", "Unique ID for this request that can be traced between services" - "event", ":ref:`ref_flyteidl.event.NodeExecutionEvent`", "", "Details about the event that occurred." - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionEventResponse: - -NodeExecutionEventResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.TaskExecutionEventRequest: - -TaskExecutionEventRequest ------------------------------------------------------------------- - -Request to send a notification that a task execution event has occurred. - - - -.. csv-table:: TaskExecutionEventRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "request_id", ":ref:`ref_string`", "", "Unique ID for this request that can be traced between services" - "event", ":ref:`ref_flyteidl.event.TaskExecutionEvent`", "", "Details about the event that occurred." - - - - - - - -.. 
_ref_flyteidl.admin.TaskExecutionEventResponse: - -TaskExecutionEventResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.WorkflowExecutionEventRequest: - -WorkflowExecutionEventRequest ------------------------------------------------------------------- - -Request to send a notification that a workflow execution event has occurred. - - - -.. csv-table:: WorkflowExecutionEventRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "request_id", ":ref:`ref_string`", "", "Unique ID for this request that can be traced between services" - "event", ":ref:`ref_flyteidl.event.WorkflowExecutionEvent`", "", "Details about the event that occurred." - - - - - - - -.. _ref_flyteidl.admin.WorkflowExecutionEventResponse: - -WorkflowExecutionEventResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/execution.proto: - -flyteidl/admin/execution.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.AbortMetadata: - -AbortMetadata ------------------------------------------------------------------- - -Specifies metadata around an aborted workflow execution. - - - -.. csv-table:: AbortMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cause", ":ref:`ref_string`", "", "In the case of a user-specified abort, this will pass along the user-supplied cause." - "principal", ":ref:`ref_string`", "", "Identifies the entity (if any) responsible for terminating the execution" - - - - - - - -.. 
_ref_flyteidl.admin.Execution: - -Execution ------------------------------------------------------------------- - -A workflow execution represents an instantiated workflow, including all inputs and additional -metadata as well as computed results included state, outputs, and duration-based attributes. -Used as a response object used in Get and List execution requests. - - - -.. csv-table:: Execution type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Unique identifier of the workflow execution." - "spec", ":ref:`ref_flyteidl.admin.ExecutionSpec`", "", "User-provided configuration and inputs for launching the execution." - "closure", ":ref:`ref_flyteidl.admin.ExecutionClosure`", "", "Execution results." - - - - - - - -.. _ref_flyteidl.admin.ExecutionClosure: - -ExecutionClosure ------------------------------------------------------------------- - -Encapsulates the results of the Execution - - - -.. csv-table:: ExecutionClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "outputs", ":ref:`ref_flyteidl.admin.LiteralMapBlob`", "", "**Deprecated.** Output URI in the case of a successful execution. DEPRECATED. Use GetExecutionData to fetch output data instead." - "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information in the case of a failed execution." - "abort_cause", ":ref:`ref_string`", "", "**Deprecated.** In the case of a user-specified abort, this will pass along the user-supplied cause." - "abort_metadata", ":ref:`ref_flyteidl.admin.AbortMetadata`", "", "In the case of a user-specified abort, this will pass along the user and their supplied cause." - "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Raw output data produced by this execution. DEPRECATED. Use GetExecutionData to fetch output data instead." 
- "computed_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Inputs computed and passed for execution. computed_inputs depends on inputs in ExecutionSpec, fixed and default inputs in launch plan" - "phase", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "", "Most recent recorded phase for the execution." - "started_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Reported time at which the execution began running." - "duration", ":ref:`ref_google.protobuf.Duration`", "", "The amount of time the execution spent running." - "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Reported time at which the execution was created." - "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Reported time at which the execution was last updated." - "notifications", ":ref:`ref_flyteidl.admin.Notification`", "repeated", "The notification settings to use after merging the CreateExecutionRequest and the launch plan notification settings. An execution launched with notifications will always prefer that definition to notifications defined statically in a launch plan." - "workflow_id", ":ref:`ref_flyteidl.core.Identifier`", "", "Identifies the workflow definition for this execution." - "state_change_details", ":ref:`ref_flyteidl.admin.ExecutionStateChangeDetails`", "", "Provides the details of the last stage change" - - - - - - - -.. _ref_flyteidl.admin.ExecutionCreateRequest: - -ExecutionCreateRequest ------------------------------------------------------------------- - -Request to launch an execution with the given project, domain and optionally-assigned name. - - - -.. csv-table:: ExecutionCreateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Name of the project the execution belongs to. +required" - "domain", ":ref:`ref_string`", "", "Name of the domain the execution belongs to. A domain can be considered as a subset within a specific project. 
+required" - "name", ":ref:`ref_string`", "", "User provided value for the resource. If none is provided the system will generate a unique string. +optional" - "spec", ":ref:`ref_flyteidl.admin.ExecutionSpec`", "", "Additional fields necessary to launch the execution. +optional" - "inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "The inputs required to start the execution. All required inputs must be included in this map. If not required and not provided, defaults apply. +optional" - - - - - - - -.. _ref_flyteidl.admin.ExecutionCreateResponse: - -ExecutionCreateResponse ------------------------------------------------------------------- - -The unique identifier for a successfully created execution. -If the name was *not* specified in the create request, this identifier will include a generated name. - - - -.. csv-table:: ExecutionCreateResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "" - - - - - - - -.. _ref_flyteidl.admin.ExecutionList: - -ExecutionList ------------------------------------------------------------------- - -Used as a response for request to list executions. -See :ref:`ref_flyteidl.admin.Execution` for more details - - - -.. csv-table:: ExecutionList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "executions", ":ref:`ref_flyteidl.admin.Execution`", "repeated", "" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.ExecutionMetadata: - -ExecutionMetadata ------------------------------------------------------------------- - -Represents attributes about an execution which are not required to launch the execution but are useful to record. -These attributes are assigned at launch time and do not change. - - - -.. 
csv-table:: ExecutionMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "mode", ":ref:`ref_flyteidl.admin.ExecutionMetadata.ExecutionMode`", "", "" - "principal", ":ref:`ref_string`", "", "Identifier of the entity that triggered this execution. For systems using back-end authentication any value set here will be discarded in favor of the authenticated user context." - "nesting", ":ref:`ref_uint32`", "", "Indicates the nestedness of this execution. If a user launches a workflow execution, the default nesting is 0. If this execution further launches a workflow (child workflow), the nesting level is incremented by 0 => 1 Generally, if workflow at nesting level k launches a workflow then the child workflow will have nesting = k + 1." - "scheduled_at", ":ref:`ref_google.protobuf.Timestamp`", "", "For scheduled executions, the requested time for execution for this specific schedule invocation." - "parent_node_execution", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Which subworkflow node (if any) launched this execution" - "reference_execution", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Optional, a reference workflow execution related to this execution. In the case of a relaunch, this references the original workflow execution." - "system_metadata", ":ref:`ref_flyteidl.admin.SystemMetadata`", "", "Optional, platform-specific metadata about the execution. In this the future this may be gated behind an ACL or some sort of authorization." - - - - - - - -.. _ref_flyteidl.admin.ExecutionRecoverRequest: - -ExecutionRecoverRequest ------------------------------------------------------------------- - -Request to recover the referenced execution. - - - -.. csv-table:: ExecutionRecoverRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifier of the workflow execution to recover." 
- "name", ":ref:`ref_string`", "", "User provided value for the recovered execution. If none is provided the system will generate a unique string. +optional" - "metadata", ":ref:`ref_flyteidl.admin.ExecutionMetadata`", "", "Additional metadata which will be used to overwrite any metadata in the reference execution when triggering a recovery execution." - - - - - - - -.. _ref_flyteidl.admin.ExecutionRelaunchRequest: - -ExecutionRelaunchRequest ------------------------------------------------------------------- - -Request to relaunch the referenced execution. - - - -.. csv-table:: ExecutionRelaunchRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifier of the workflow execution to relaunch. +required" - "name", ":ref:`ref_string`", "", "User provided value for the relaunched execution. If none is provided the system will generate a unique string. +optional" - "overwrite_cache", ":ref:`ref_bool`", "", "Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. If enabled, all calculations are performed even if cached results would be available, overwriting the stored data once execution finishes successfully." - - - - - - - -.. _ref_flyteidl.admin.ExecutionSpec: - -ExecutionSpec ------------------------------------------------------------------- - -An ExecutionSpec encompasses all data used to launch this execution. The Spec does not change over the lifetime -of an execution as it progresses across phase changes. - - - -.. 
csv-table:: ExecutionSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "launch_plan", ":ref:`ref_flyteidl.core.Identifier`", "", "Launch plan to be executed" - "inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Input values to be passed for the execution" - "metadata", ":ref:`ref_flyteidl.admin.ExecutionMetadata`", "", "Metadata for the execution" - "notifications", ":ref:`ref_flyteidl.admin.NotificationList`", "", "List of notifications based on Execution status transitions When this list is not empty it is used rather than any notifications defined in the referenced launch plan. When this list is empty, the notifications defined for the launch plan will be applied." - "disable_all", ":ref:`ref_bool`", "", "This should be set to true if all notifications are intended to be disabled for this execution." - "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Labels to apply to the execution resource." - "annotations", ":ref:`ref_flyteidl.admin.Annotations`", "", "Annotations to apply to the execution resource." - "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "Optional: security context override to apply this execution." - "auth_role", ":ref:`ref_flyteidl.admin.AuthRole`", "", "**Deprecated.** Optional: auth override to apply this execution." - "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "Indicates the runtime priority of the execution." - "max_parallelism", ":ref:`ref_int32`", "", "Controls the maximum number of task nodes that can be run in parallel for the entire workflow. This is useful to achieve fairness. Note: MapTasks are regarded as one unit, and parallelism/concurrency of MapTasks is independent from this." - "raw_output_data_config", ":ref:`ref_flyteidl.admin.RawOutputDataConfig`", "", "User setting to configure where to store offloaded data (i.e. Blobs, structured datasets, query data, etc.). 
This should be a prefix like s3://my-bucket/my-data" - "cluster_assignment", ":ref:`ref_flyteidl.admin.ClusterAssignment`", "", "Controls how to select an available cluster on which this execution should run." - "interruptible", ":ref:`ref_google.protobuf.BoolValue`", "", "Allows for the interruptible flag of a workflow to be overwritten for a single execution. Omitting this field uses the workflow's value as a default. As we need to distinguish between the field not being provided and its default value false, we have to use a wrapper around the bool field." - "overwrite_cache", ":ref:`ref_bool`", "", "Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. If enabled, all calculations are performed even if cached results would be available, overwriting the stored data once execution finishes successfully." - - - - - - - -.. _ref_flyteidl.admin.ExecutionStateChangeDetails: - -ExecutionStateChangeDetails ------------------------------------------------------------------- - - - - - -.. csv-table:: ExecutionStateChangeDetails type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "state", ":ref:`ref_flyteidl.admin.ExecutionState`", "", "The state of the execution is used to control its visibility in the UI/CLI." - "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the state changed." - "principal", ":ref:`ref_string`", "", "Identifies the entity (if any) responsible for causing the state change of the execution" - - - - - - - -.. _ref_flyteidl.admin.ExecutionTerminateRequest: - -ExecutionTerminateRequest ------------------------------------------------------------------- - -Request to terminate an in-progress execution. This action is irreversible. -If an execution is already terminated, this request will simply be a no-op. -This request will fail if it references a non-existent execution. 
-If the request succeeds the phase "ABORTED" will be recorded for the termination -with the optional cause added to the output_result. - - - -.. csv-table:: ExecutionTerminateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Uniquely identifies the individual workflow execution to be terminated." - "cause", ":ref:`ref_string`", "", "Optional reason for aborting." - - - - - - - -.. _ref_flyteidl.admin.ExecutionTerminateResponse: - -ExecutionTerminateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.ExecutionUpdateRequest: - -ExecutionUpdateRequest ------------------------------------------------------------------- - - - - - -.. csv-table:: ExecutionUpdateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifier of the execution to update" - "state", ":ref:`ref_flyteidl.admin.ExecutionState`", "", "State to set as the new value active/archive" - - - - - - - -.. _ref_flyteidl.admin.ExecutionUpdateResponse: - -ExecutionUpdateResponse ------------------------------------------------------------------- - - - - - - - - - - -.. _ref_flyteidl.admin.LiteralMapBlob: - -LiteralMapBlob ------------------------------------------------------------------- - -Input/output data can represented by actual values or a link to where values are stored - - - -.. csv-table:: LiteralMapBlob type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Data in LiteralMap format" - "uri", ":ref:`ref_string`", "", "In the event that the map is too large, we return a uri to the data" - - - - - - - -.. 
_ref_flyteidl.admin.NotificationList: - -NotificationList ------------------------------------------------------------------- - - - - - -.. csv-table:: NotificationList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "notifications", ":ref:`ref_flyteidl.admin.Notification`", "repeated", "" - - - - - - - -.. _ref_flyteidl.admin.SystemMetadata: - -SystemMetadata ------------------------------------------------------------------- - -Represents system, rather than user-facing, metadata about an execution. - - - -.. csv-table:: SystemMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "execution_cluster", ":ref:`ref_string`", "", "Which execution cluster this execution ran on." - - - - - - - -.. _ref_flyteidl.admin.WorkflowExecutionGetDataRequest: - -WorkflowExecutionGetDataRequest ------------------------------------------------------------------- - -Request structure to fetch inputs, output and other data produced by an execution. -By default this data is not returned inline in :ref:`ref_flyteidl.admin.WorkflowExecutionGetRequest` - - - -.. csv-table:: WorkflowExecutionGetDataRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "The identifier of the execution for which to fetch inputs and outputs." - - - - - - - -.. _ref_flyteidl.admin.WorkflowExecutionGetDataResponse: - -WorkflowExecutionGetDataResponse ------------------------------------------------------------------- - -Response structure for WorkflowExecutionGetDataRequest which contains inputs and outputs for an execution. - - - -.. csv-table:: WorkflowExecutionGetDataResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "outputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of execution outputs. Deprecated: Please use full_outputs instead." 
- "inputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of execution inputs. Deprecated: Please use full_inputs instead." - "full_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_inputs will only be populated if they are under a configured size threshold." - "full_outputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_outputs will only be populated if they are under a configured size threshold." - - - - - - - -.. _ref_flyteidl.admin.WorkflowExecutionGetRequest: - -WorkflowExecutionGetRequest ------------------------------------------------------------------- - -A message used to fetch a single workflow execution entity. -See :ref:`ref_flyteidl.admin.Execution` for more details - - - -.. csv-table:: WorkflowExecutionGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Uniquely identifies an individual workflow execution." - - - - - - -.. - end messages - - - -.. _ref_flyteidl.admin.ExecutionMetadata.ExecutionMode: - -ExecutionMetadata.ExecutionMode ------------------------------------------------------------------- - -The method by which this execution was launched. - -.. csv-table:: Enum ExecutionMetadata.ExecutionMode values - :header: "Name", "Number", "Description" - :widths: auto - - "MANUAL", "0", "The default execution mode, MANUAL implies that an execution was launched by an individual." - "SCHEDULED", "1", "A schedule triggered this execution launch." - "SYSTEM", "2", "A system process was responsible for launching this execution rather an individual." - "RELAUNCH", "3", "This execution was launched with identical inputs as a previous execution." - "CHILD_WORKFLOW", "4", "This execution was triggered by another execution." - "RECOVERED", "5", "This execution was recovered from another execution." - - - -.. 
_ref_flyteidl.admin.ExecutionState: - -ExecutionState ------------------------------------------------------------------- - -The state of the execution is used to control its visibility in the UI/CLI. - -.. csv-table:: Enum ExecutionState values - :header: "Name", "Number", "Description" - :widths: auto - - "EXECUTION_ACTIVE", "0", "By default, all executions are considered active." - "EXECUTION_ARCHIVED", "1", "Archived executions are no longer visible in the UI." - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/launch_plan.proto: - -flyteidl/admin/launch_plan.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.ActiveLaunchPlanListRequest: - -ActiveLaunchPlanListRequest ------------------------------------------------------------------- - -Represents a request structure to list active launch plans within a project/domain. -See :ref:`ref_flyteidl.admin.LaunchPlan` for more details - - - -.. csv-table:: ActiveLaunchPlanListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Name of the project that contains the identifiers. +required." - "domain", ":ref:`ref_string`", "", "Name of the domain the identifiers belongs to within the project. +required." - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required." - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" - - - - - - - -.. 
_ref_flyteidl.admin.ActiveLaunchPlanRequest: - -ActiveLaunchPlanRequest ------------------------------------------------------------------- - -Represents a request struct for finding an active launch plan for a given NamedEntityIdentifier -See :ref:`ref_flyteidl.admin.LaunchPlan` for more details - - - -.. csv-table:: ActiveLaunchPlanRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "+required." - - - - - - - -.. _ref_flyteidl.admin.Auth: - -Auth ------------------------------------------------------------------- - -Defines permissions associated with executions created by this launch plan spec. -Use either of these roles when they have permissions required by your workflow execution. -Deprecated. - - - -.. csv-table:: Auth type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "assumable_iam_role", ":ref:`ref_string`", "", "Defines an optional iam role which will be used for tasks run in executions created with this launch plan." - "kubernetes_service_account", ":ref:`ref_string`", "", "Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan." - - - - - - - -.. _ref_flyteidl.admin.LaunchPlan: - -LaunchPlan ------------------------------------------------------------------- - -A LaunchPlan provides the capability to templatize workflow executions. -Launch plans simplify associating one or more schedules, inputs and notifications with your workflows. -Launch plans can be shared and used to trigger executions with predefined inputs even when a workflow -definition doesn't necessarily have a default value for said input. - - - -.. csv-table:: LaunchPlan type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Uniquely identifies a launch plan entity." 
- "spec", ":ref:`ref_flyteidl.admin.LaunchPlanSpec`", "", "User-provided launch plan details, including reference workflow, inputs and other metadata." - "closure", ":ref:`ref_flyteidl.admin.LaunchPlanClosure`", "", "Values computed by the flyte platform after launch plan registration." - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanClosure: - -LaunchPlanClosure ------------------------------------------------------------------- - -Values computed by the flyte platform after launch plan registration. -These include expected_inputs required to be present in a CreateExecutionRequest -to launch the reference workflow as well timestamp values associated with the launch plan. - - - -.. csv-table:: LaunchPlanClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "state", ":ref:`ref_flyteidl.admin.LaunchPlanState`", "", "Indicate the Launch plan state." - "expected_inputs", ":ref:`ref_flyteidl.core.ParameterMap`", "", "Indicates the set of inputs expected when creating an execution with the Launch plan" - "expected_outputs", ":ref:`ref_flyteidl.core.VariableMap`", "", "Indicates the set of outputs expected to be produced by creating an execution with the Launch plan" - "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the launch plan was created." - "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the launch plan was last updated." - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanCreateRequest: - -LaunchPlanCreateRequest ------------------------------------------------------------------- - -Request to register a launch plan. The included LaunchPlanSpec may have a complete or incomplete set of inputs required -to launch a workflow execution. By default all launch plans are registered in state INACTIVE. If you wish to -set the state to ACTIVE, you must submit a LaunchPlanUpdateRequest, after you have successfully created a launch plan. - - - -.. 
csv-table:: LaunchPlanCreateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Uniquely identifies a launch plan entity." - "spec", ":ref:`ref_flyteidl.admin.LaunchPlanSpec`", "", "User-provided launch plan details, including reference workflow, inputs and other metadata." - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanCreateResponse: - -LaunchPlanCreateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanList: - -LaunchPlanList ------------------------------------------------------------------- - -Response object for list launch plan requests. -See :ref:`ref_flyteidl.admin.LaunchPlan` for more details - - - -.. csv-table:: LaunchPlanList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "launch_plans", ":ref:`ref_flyteidl.admin.LaunchPlan`", "repeated", "" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanMetadata: - -LaunchPlanMetadata ------------------------------------------------------------------- - -Additional launch plan attributes included in the LaunchPlanSpec not strictly required to launch -the reference workflow. - - - -.. csv-table:: LaunchPlanMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "schedule", ":ref:`ref_flyteidl.admin.Schedule`", "", "Schedule to execute the Launch Plan" - "notifications", ":ref:`ref_flyteidl.admin.Notification`", "repeated", "List of notifications based on Execution status transitions" - - - - - - - -.. 
_ref_flyteidl.admin.LaunchPlanSpec: - -LaunchPlanSpec ------------------------------------------------------------------- - -User-provided launch plan definition and configuration values. - - - -.. csv-table:: LaunchPlanSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "workflow_id", ":ref:`ref_flyteidl.core.Identifier`", "", "Reference to the Workflow template that the launch plan references" - "entity_metadata", ":ref:`ref_flyteidl.admin.LaunchPlanMetadata`", "", "Metadata for the Launch Plan" - "default_inputs", ":ref:`ref_flyteidl.core.ParameterMap`", "", "Input values to be passed for the execution. These can be overridden when an execution is created with this launch plan." - "fixed_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Fixed, non-overridable inputs for the Launch Plan. These can not be overridden when an execution is created with this launch plan." - "role", ":ref:`ref_string`", "", "**Deprecated.** String to indicate the role to use to execute the workflow underneath" - "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Custom labels to be applied to the execution resource." - "annotations", ":ref:`ref_flyteidl.admin.Annotations`", "", "Custom annotations to be applied to the execution resource." - "auth", ":ref:`ref_flyteidl.admin.Auth`", "", "**Deprecated.** Indicates the permission associated with workflow executions triggered with this launch plan." - "auth_role", ":ref:`ref_flyteidl.admin.AuthRole`", "", "**Deprecated.** " - "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "Indicates security context for permissions triggered with this launch plan" - "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "Indicates the runtime priority of the execution." - "raw_output_data_config", ":ref:`ref_flyteidl.admin.RawOutputDataConfig`", "", "Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.)." 
- "max_parallelism", ":ref:`ref_int32`", "", "Controls the maximum number of tasknodes that can be run in parallel for the entire workflow. This is useful to achieve fairness. Note: MapTasks are regarded as one unit, and parallelism/concurrency of MapTasks is independent from this." - "interruptible", ":ref:`ref_google.protobuf.BoolValue`", "", "Allows for the interruptible flag of a workflow to be overwritten for a single execution. Omitting this field uses the workflow's value as a default. As we need to distinguish between the field not being provided and its default value false, we have to use a wrapper around the bool field." - "overwrite_cache", ":ref:`ref_bool`", "", "Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. If enabled, all calculations are performed even if cached results would be available, overwriting the stored data once execution finishes successfully." - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanUpdateRequest: - -LaunchPlanUpdateRequest ------------------------------------------------------------------- - -Request to set the referenced launch plan state to the configured value. -See :ref:`ref_flyteidl.admin.LaunchPlan` for more details - - - -.. csv-table:: LaunchPlanUpdateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Identifier of launch plan for which to change state. +required." - "state", ":ref:`ref_flyteidl.admin.LaunchPlanState`", "", "Desired state to apply to the launch plan. +required." - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanUpdateResponse: - -LaunchPlanUpdateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - -.. - end messages - - - -.. 
_ref_flyteidl.admin.LaunchPlanState: - -LaunchPlanState ------------------------------------------------------------------- - -By default any launch plan regardless of state can be used to launch a workflow execution. -However, at most one version of a launch plan -(e.g. a NamedEntityIdentifier set of shared project, domain and name values) can be -active at a time in regards to *schedules*. That is, at most one schedule in a NamedEntityIdentifier -group will be observed and trigger executions at a defined cadence. - -.. csv-table:: Enum LaunchPlanState values - :header: "Name", "Number", "Description" - :widths: auto - - "INACTIVE", "0", "" - "ACTIVE", "1", "" - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/matchable_resource.proto: - -flyteidl/admin/matchable_resource.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.ClusterResourceAttributes: - -ClusterResourceAttributes ------------------------------------------------------------------- - - - - - -.. csv-table:: ClusterResourceAttributes type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.ClusterResourceAttributes.AttributesEntry`", "repeated", "Custom resource attributes which will be applied in cluster resource creation (e.g. quotas). Map keys are the *case-sensitive* names of variables in templatized resource files. Map values should be the custom values which get substituted during resource creation." - - - - - - - -.. _ref_flyteidl.admin.ClusterResourceAttributes.AttributesEntry: - -ClusterResourceAttributes.AttributesEntry ------------------------------------------------------------------- - - - - - -.. 
csv-table:: ClusterResourceAttributes.AttributesEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.admin.ExecutionClusterLabel: - -ExecutionClusterLabel ------------------------------------------------------------------- - - - - - -.. csv-table:: ExecutionClusterLabel type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_string`", "", "Label value to determine where the execution will be run" - - - - - - - -.. _ref_flyteidl.admin.ExecutionQueueAttributes: - -ExecutionQueueAttributes ------------------------------------------------------------------- - - - - - -.. csv-table:: ExecutionQueueAttributes type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tags", ":ref:`ref_string`", "repeated", "Tags used for assigning execution queues for tasks defined within this project." - - - - - - - -.. _ref_flyteidl.admin.ListMatchableAttributesRequest: - -ListMatchableAttributesRequest ------------------------------------------------------------------- - -Request all matching resource attributes for a resource type. -See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details - - - -.. csv-table:: ListMatchableAttributesRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "+required" - - - - - - - -.. _ref_flyteidl.admin.ListMatchableAttributesResponse: - -ListMatchableAttributesResponse ------------------------------------------------------------------- - -Response for a request for all matching resource attributes for a resource type. -See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details - - - -.. 
csv-table:: ListMatchableAttributesResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "configurations", ":ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`", "repeated", "" - - - - - - - -.. _ref_flyteidl.admin.MatchableAttributesConfiguration: - -MatchableAttributesConfiguration ------------------------------------------------------------------- - -Represents a custom set of attributes applied for either a domain; a domain and project; or -domain, project and workflow name. -These are used to override system level defaults for kubernetes cluster resource management, -default execution values, and more all across different levels of specificity. - - - -.. csv-table:: MatchableAttributesConfiguration type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", "" - "domain", ":ref:`ref_string`", "", "" - "project", ":ref:`ref_string`", "", "" - "workflow", ":ref:`ref_string`", "", "" - "launch_plan", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.admin.MatchingAttributes: - -MatchingAttributes ------------------------------------------------------------------- - -Generic container for encapsulating all types of the above attributes messages. - - - -.. 
csv-table:: MatchingAttributes type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "task_resource_attributes", ":ref:`ref_flyteidl.admin.TaskResourceAttributes`", "", "" - "cluster_resource_attributes", ":ref:`ref_flyteidl.admin.ClusterResourceAttributes`", "", "" - "execution_queue_attributes", ":ref:`ref_flyteidl.admin.ExecutionQueueAttributes`", "", "" - "execution_cluster_label", ":ref:`ref_flyteidl.admin.ExecutionClusterLabel`", "", "" - "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "" - "plugin_overrides", ":ref:`ref_flyteidl.admin.PluginOverrides`", "", "" - "workflow_execution_config", ":ref:`ref_flyteidl.admin.WorkflowExecutionConfig`", "", "" - "cluster_assignment", ":ref:`ref_flyteidl.admin.ClusterAssignment`", "", "" - - - - - - - -.. _ref_flyteidl.admin.PluginOverride: - -PluginOverride ------------------------------------------------------------------- - -This MatchableAttribute configures selecting alternate plugin implementations for a given task type. -In addition to an override implementation a selection of fallbacks can be provided or other modes -for handling cases where the desired plugin override is not enabled in a given Flyte deployment. - - - -.. csv-table:: PluginOverride type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "task_type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier." - "plugin_id", ":ref:`ref_string`", "repeated", "A set of plugin ids which should handle tasks of this type instead of the default registered plugin. The list will be tried in order until a plugin is found with that id." - "missing_plugin_behavior", ":ref:`ref_flyteidl.admin.PluginOverride.MissingPluginBehavior`", "", "Defines the behavior when no plugin from the plugin_id list is found." - - - - - - - -.. _ref_flyteidl.admin.PluginOverrides: - -PluginOverrides ------------------------------------------------------------------- - - - - - -.. 
csv-table:: PluginOverrides type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "overrides", ":ref:`ref_flyteidl.admin.PluginOverride`", "repeated", "" - - - - - - - -.. _ref_flyteidl.admin.TaskResourceAttributes: - -TaskResourceAttributes ------------------------------------------------------------------- - -Defines task resource defaults and limits that will be applied at task registration. - - - -.. csv-table:: TaskResourceAttributes type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "defaults", ":ref:`ref_flyteidl.admin.TaskResourceSpec`", "", "" - "limits", ":ref:`ref_flyteidl.admin.TaskResourceSpec`", "", "" - - - - - - - -.. _ref_flyteidl.admin.TaskResourceSpec: - -TaskResourceSpec ------------------------------------------------------------------- - -Defines a set of overridable task resource attributes set during task registration. - - - -.. csv-table:: TaskResourceSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cpu", ":ref:`ref_string`", "", "" - "gpu", ":ref:`ref_string`", "", "" - "memory", ":ref:`ref_string`", "", "" - "storage", ":ref:`ref_string`", "", "" - "ephemeral_storage", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.admin.WorkflowExecutionConfig: - -WorkflowExecutionConfig ------------------------------------------------------------------- - -Adds defaults for customizable workflow-execution specifications and overrides. - - - -.. csv-table:: WorkflowExecutionConfig type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "max_parallelism", ":ref:`ref_int32`", "", "Can be used to control the number of parallel nodes to run within the workflow. This is useful to achieve fairness." - "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "Indicates security context permissions for executions triggered with this matchable attribute." 
- "raw_output_data_config", ":ref:`ref_flyteidl.admin.RawOutputDataConfig`", "", "Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.)." - "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Custom labels to be applied to a triggered execution resource." - "annotations", ":ref:`ref_flyteidl.admin.Annotations`", "", "Custom annotations to be applied to a triggered execution resource." - "interruptible", ":ref:`ref_google.protobuf.BoolValue`", "", "Allows for the interruptible flag of a workflow to be overwritten for a single execution. Omitting this field uses the workflow's value as a default. As we need to distinguish between the field not being provided and its default value false, we have to use a wrapper around the bool field." - "overwrite_cache", ":ref:`ref_bool`", "", "Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. If enabled, all calculations are performed even if cached results would be available, overwriting the stored data once execution finishes successfully." - - - - - - -.. - end messages - - - -.. _ref_flyteidl.admin.MatchableResource: - -MatchableResource ------------------------------------------------------------------- - -Defines a resource that can be configured by customizable Project-, ProjectDomain- or WorkflowAttributes -based on matching tags. - -.. csv-table:: Enum MatchableResource values - :header: "Name", "Number", "Description" - :widths: auto - - "TASK_RESOURCE", "0", "Applies to customizable task resource requests and limits." - "CLUSTER_RESOURCE", "1", "Applies to configuring templated kubernetes cluster resources." - "EXECUTION_QUEUE", "2", "Configures task and dynamic task execution queue assignment." - "EXECUTION_CLUSTER_LABEL", "3", "Configures the K8s cluster label to be used for execution to be run" - "QUALITY_OF_SERVICE_SPECIFICATION", "4", "Configures default quality of service when undefined in an execution spec." 
- "PLUGIN_OVERRIDE", "5", "Selects configurable plugin implementation behavior for a given task type." - "WORKFLOW_EXECUTION_CONFIG", "6", "Adds defaults for customizable workflow-execution specifications and overrides." - "CLUSTER_ASSIGNMENT", "7", "Controls how to select an available cluster on which this execution should run." - - - -.. _ref_flyteidl.admin.PluginOverride.MissingPluginBehavior: - -PluginOverride.MissingPluginBehavior ------------------------------------------------------------------- - - - -.. csv-table:: Enum PluginOverride.MissingPluginBehavior values - :header: "Name", "Number", "Description" - :widths: auto - - "FAIL", "0", "By default, if this plugin is not enabled for a Flyte deployment then execution will fail." - "USE_DEFAULT", "1", "Uses the system-configured default implementation." - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/node_execution.proto: - -flyteidl/admin/node_execution.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.DynamicWorkflowNodeMetadata: - -DynamicWorkflowNodeMetadata ------------------------------------------------------------------- - -For dynamic workflow nodes we capture information about the dynamic workflow definition that gets generated. - - - -.. csv-table:: DynamicWorkflowNodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow." - "compiled_workflow", ":ref:`ref_flyteidl.core.CompiledWorkflowClosure`", "", "Represents the compiled representation of the embedded dynamic workflow." - - - - - - - -.. _ref_flyteidl.admin.NodeExecution: - -NodeExecution ------------------------------------------------------------------- - -Encapsulates all details for a single node execution entity. -A node represents a component in the overall workflow graph. 
A node can launch a task, multiple tasks, an entire nested -sub-workflow, or even a separate child-workflow execution. -The same task can be called repeatedly in a single workflow but each node is unique. - - - -.. csv-table:: NodeExecution type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Uniquely identifies an individual node execution." - "input_uri", ":ref:`ref_string`", "", "Path to remote data store where input blob is stored." - "closure", ":ref:`ref_flyteidl.admin.NodeExecutionClosure`", "", "Computed results associated with this node execution." - "metadata", ":ref:`ref_flyteidl.admin.NodeExecutionMetaData`", "", "Metadata for Node Execution" - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionClosure: - -NodeExecutionClosure ------------------------------------------------------------------- - -Container for node execution details and results. - - - -.. csv-table:: NodeExecutionClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "output_uri", ":ref:`ref_string`", "", "**Deprecated.** Links to a remotely stored, serialized core.LiteralMap of node execution outputs. DEPRECATED. Use GetNodeExecutionData to fetch output data instead." - "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the Node" - "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Raw output data produced by this node execution. DEPRECATED. Use GetNodeExecutionData to fetch output data instead." - "phase", ":ref:`ref_flyteidl.core.NodeExecution.Phase`", "", "The last recorded phase for this node execution." - "started_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the node execution began running." - "duration", ":ref:`ref_google.protobuf.Duration`", "", "The amount of time the node execution spent running." 
- "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the node execution was created." - "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the node execution was last updated." - "workflow_node_metadata", ":ref:`ref_flyteidl.admin.WorkflowNodeMetadata`", "", "" - "task_node_metadata", ":ref:`ref_flyteidl.admin.TaskNodeMetadata`", "", "" - "deck_uri", ":ref:`ref_string`", "", "String location uniquely identifying where the deck HTML file is. NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar)" - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionForTaskListRequest: - -NodeExecutionForTaskListRequest ------------------------------------------------------------------- - -Represents a request structure to retrieve a list of node execution entities launched by a specific task. -This can arise when a task yields a subworkflow. - - - -.. csv-table:: NodeExecutionForTaskListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "task_execution_id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Indicates the node execution to filter by. +required" - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the, server-provided token can be used to fetch the next page in a query. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionGetDataRequest: - -NodeExecutionGetDataRequest ------------------------------------------------------------------- - -Request structure to fetch inputs and output for a node execution. 
-By default, these are not returned in :ref:`ref_flyteidl.admin.NodeExecutionGetRequest` - - - -.. csv-table:: NodeExecutionGetDataRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "The identifier of the node execution for which to fetch inputs and outputs." - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionGetDataResponse: - -NodeExecutionGetDataResponse ------------------------------------------------------------------- - -Response structure for NodeExecutionGetDataRequest which contains inputs and outputs for a node execution. - - - -.. csv-table:: NodeExecutionGetDataResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "inputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of node execution inputs. Deprecated: Please use full_inputs instead." - "outputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of node execution outputs. Deprecated: Please use full_outputs instead." - "full_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_inputs will only be populated if they are under a configured size threshold." - "full_outputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_outputs will only be populated if they are under a configured size threshold." - "dynamic_workflow", ":ref:`ref_flyteidl.admin.DynamicWorkflowNodeMetadata`", "", "Optional Workflow closure for a dynamically generated workflow, in the case this node yields a dynamic workflow we return its structure here." - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionGetRequest: - -NodeExecutionGetRequest ------------------------------------------------------------------- - -A message used to fetch a single node execution entity. -See :ref:`ref_flyteidl.admin.NodeExecution` for more details - - - -.. 
csv-table:: NodeExecutionGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Uniquely identifies an individual node execution. +required" - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionList: - -NodeExecutionList ------------------------------------------------------------------- - -Request structure to retrieve a list of node execution entities. -See :ref:`ref_flyteidl.admin.NodeExecution` for more details - - - -.. csv-table:: NodeExecutionList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "node_executions", ":ref:`ref_flyteidl.admin.NodeExecution`", "repeated", "" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionListRequest: - -NodeExecutionListRequest ------------------------------------------------------------------- - -Represents a request structure to retrieve a list of node execution entities. -See :ref:`ref_flyteidl.admin.NodeExecution` for more details - - - -.. csv-table:: NodeExecutionListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "workflow_execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Indicates the workflow execution to filter by. +required" - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" - "token", ":ref:`ref_string`", "", "" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" - "unique_parent_id", ":ref:`ref_string`", "", "Unique identifier of the parent node in the execution +optional" - - - - - - - -.. 
_ref_flyteidl.admin.NodeExecutionMetaData: - -NodeExecutionMetaData ------------------------------------------------------------------- - -Represents additional attributes related to a Node Execution - - - -.. csv-table:: NodeExecutionMetaData type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "retry_group", ":ref:`ref_string`", "", "Node executions are grouped depending on retries of the parent. Retry group is unique within the context of a parent node." - "is_parent_node", ":ref:`ref_bool`", "", "Boolean flag indicating if the node has child nodes under it. This can be true when a node contains a dynamic workflow which then produces child nodes." - "spec_node_id", ":ref:`ref_string`", "", "Node id of the node in the original workflow. This maps to value of WorkflowTemplate.nodes[X].id" - "is_dynamic", ":ref:`ref_bool`", "", "Boolean flag indicating if the node contains a dynamic workflow which then produces child nodes. This is to distinguish between subworkflows and dynamic workflows which can both have is_parent_node as true." - - - - - - - -.. _ref_flyteidl.admin.TaskNodeMetadata: - -TaskNodeMetadata ------------------------------------------------------------------- - -Metadata for the case in which the node is a TaskNode - - - -.. csv-table:: TaskNodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cache_status", ":ref:`ref_flyteidl.core.CatalogCacheStatus`", "", "Captures the status of caching for this execution." - "catalog_key", ":ref:`ref_flyteidl.core.CatalogMetadata`", "", "This structure carries the catalog artifact information" - "checkpoint_uri", ":ref:`ref_string`", "", "The latest checkpoint location" - - - - - - - -.. _ref_flyteidl.admin.WorkflowNodeMetadata: - -WorkflowNodeMetadata ------------------------------------------------------------------- - -Metadata for a WorkflowNode - - - -.. 
csv-table:: WorkflowNodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "executionId", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "The identifier for a workflow execution launched by a node." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/notification.proto: - -flyteidl/admin/notification.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.EmailMessage: - -EmailMessage ------------------------------------------------------------------- - -Represents the Email object that is sent to a publisher/subscriber -to forward the notification. -Note: This is internal to Admin and doesn't need to be exposed to other components. - - - -.. csv-table:: EmailMessage type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "recipients_email", ":ref:`ref_string`", "repeated", "The list of email addresses to receive an email with the content populated in the other fields. Currently, each email recipient will receive its own email. This populates the TO field." - "sender_email", ":ref:`ref_string`", "", "The email of the sender. This populates the FROM field." - "subject_line", ":ref:`ref_string`", "", "The content of the subject line. This populates the SUBJECT field." - "body", ":ref:`ref_string`", "", "The content of the email body. This populates the BODY field." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/project.proto: - -flyteidl/admin/project.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.Domain: - -Domain ------------------------------------------------------------------- - -Namespace within a project commonly used to differentiate between different service instances. -e.g. 
"production", "development", etc. - - - -.. csv-table:: Domain type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_string`", "", "Globally unique domain name." - "name", ":ref:`ref_string`", "", "Display name." - - - - - - - -.. _ref_flyteidl.admin.Project: - -Project ------------------------------------------------------------------- - -Top-level namespace used to classify different entities like workflows and executions. - - - -.. csv-table:: Project type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_string`", "", "Globally unique project name." - "name", ":ref:`ref_string`", "", "Display name." - "domains", ":ref:`ref_flyteidl.admin.Domain`", "repeated", "" - "description", ":ref:`ref_string`", "", "" - "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Leverage Labels from flyteidl.admin.common.proto to tag projects with ownership information." - "state", ":ref:`ref_flyteidl.admin.Project.ProjectState`", "", "" - - - - - - - -.. _ref_flyteidl.admin.ProjectListRequest: - -ProjectListRequest ------------------------------------------------------------------- - -Request to retrieve a list of projects matching specified filters. -See :ref:`ref_flyteidl.admin.Project` for more details - - - -.. csv-table:: ProjectListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "limit", ":ref:`ref_uint32`", "", "Indicates the number of projects to be returned. +required" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" - - - - - - - -.. 
_ref_flyteidl.admin.ProjectRegisterRequest: - -ProjectRegisterRequest ------------------------------------------------------------------- - -Adds a new user-project within the Flyte deployment. -See :ref:`ref_flyteidl.admin.Project` for more details - - - -.. csv-table:: ProjectRegisterRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_flyteidl.admin.Project`", "", "+required" - - - - - - - -.. _ref_flyteidl.admin.ProjectRegisterResponse: - -ProjectRegisterResponse ------------------------------------------------------------------- - -Purposefully empty, may be updated in the future. - - - - - - - - -.. _ref_flyteidl.admin.ProjectUpdateResponse: - -ProjectUpdateResponse ------------------------------------------------------------------- - -Purposefully empty, may be updated in the future. - - - - - - - - -.. _ref_flyteidl.admin.Projects: - -Projects ------------------------------------------------------------------- - -Represents a list of projects. -See :ref:`ref_flyteidl.admin.Project` for more details - - - -.. csv-table:: Projects type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "projects", ":ref:`ref_flyteidl.admin.Project`", "repeated", "" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - -.. - end messages - - - -.. _ref_flyteidl.admin.Project.ProjectState: - -Project.ProjectState ------------------------------------------------------------------- - -The state of the project is used to control its visibility in the UI and validity. - -.. csv-table:: Enum Project.ProjectState values - :header: "Name", "Number", "Description" - :widths: auto - - "ACTIVE", "0", "By default, all projects are considered active." 
- "ARCHIVED", "1", "Archived projects are no longer visible in the UI and no longer valid." - "SYSTEM_GENERATED", "2", "System generated projects that aren't explicitly created or managed by a user." - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/project_attributes.proto: - -flyteidl/admin/project_attributes.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.ProjectAttributes: - -ProjectAttributes ------------------------------------------------------------------- - -Defines a set of custom matching attributes at the project level. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: ProjectAttributes type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id for which this set of attributes will be applied." - "matching_attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", "" - - - - - - - -.. _ref_flyteidl.admin.ProjectAttributesDeleteRequest: - -ProjectAttributesDeleteRequest ------------------------------------------------------------------- - -Request to delete a set matchable project level attribute override. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: ProjectAttributesDeleteRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" - "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to delete. +required" - - - - - - - -.. 
_ref_flyteidl.admin.ProjectAttributesDeleteResponse: - -ProjectAttributesDeleteResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.ProjectAttributesGetRequest: - -ProjectAttributesGetRequest ------------------------------------------------------------------- - -Request to get an individual project level attribute override. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: ProjectAttributesGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" - "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to return. +required" - - - - - - - -.. _ref_flyteidl.admin.ProjectAttributesGetResponse: - -ProjectAttributesGetResponse ------------------------------------------------------------------- - -Response to get an individual project level attribute override. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: ProjectAttributesGetResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.ProjectAttributes`", "", "" - - - - - - - -.. _ref_flyteidl.admin.ProjectAttributesUpdateRequest: - -ProjectAttributesUpdateRequest ------------------------------------------------------------------- - -Sets custom attributes for a project -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. 
csv-table:: ProjectAttributesUpdateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.ProjectAttributes`", "", "+required" - - - - - - - -.. _ref_flyteidl.admin.ProjectAttributesUpdateResponse: - -ProjectAttributesUpdateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/project_domain_attributes.proto: - -flyteidl/admin/project_domain_attributes.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.ProjectDomainAttributes: - -ProjectDomainAttributes ------------------------------------------------------------------- - -Defines a set of custom matching attributes which defines resource defaults for a project and domain. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: ProjectDomainAttributes type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id for which this set of attributes will be applied." - "domain", ":ref:`ref_string`", "", "Unique domain id for which this set of attributes will be applied." - "matching_attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", "" - - - - - - - -.. _ref_flyteidl.admin.ProjectDomainAttributesDeleteRequest: - -ProjectDomainAttributesDeleteRequest ------------------------------------------------------------------- - -Request to delete a set matchable project domain attribute override. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. 
csv-table:: ProjectDomainAttributesDeleteRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" - "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required" - "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to delete. +required" - - - - - - - -.. _ref_flyteidl.admin.ProjectDomainAttributesDeleteResponse: - -ProjectDomainAttributesDeleteResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.ProjectDomainAttributesGetRequest: - -ProjectDomainAttributesGetRequest ------------------------------------------------------------------- - -Request to get an individual project domain attribute override. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: ProjectDomainAttributesGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" - "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required" - "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to return. +required" - - - - - - - -.. _ref_flyteidl.admin.ProjectDomainAttributesGetResponse: - -ProjectDomainAttributesGetResponse ------------------------------------------------------------------- - -Response to get an individual project domain attribute override. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. 
csv-table:: ProjectDomainAttributesGetResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributes`", "", "" - - - - - - - -.. _ref_flyteidl.admin.ProjectDomainAttributesUpdateRequest: - -ProjectDomainAttributesUpdateRequest ------------------------------------------------------------------- - -Sets custom attributes for a project-domain combination. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: ProjectDomainAttributesUpdateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributes`", "", "+required" - - - - - - - -.. _ref_flyteidl.admin.ProjectDomainAttributesUpdateResponse: - -ProjectDomainAttributesUpdateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/schedule.proto: - -flyteidl/admin/schedule.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.CronSchedule: - -CronSchedule ------------------------------------------------------------------- - -Options for schedules to run according to a cron expression. - - - -.. 
csv-table:: CronSchedule type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "schedule", ":ref:`ref_string`", "", "Standard/default cron implementation as described by https://en.wikipedia.org/wiki/Cron#CRON_expression; Also supports nonstandard predefined scheduling definitions as described by https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html#CronExpressions except @reboot" - "offset", ":ref:`ref_string`", "", "ISO 8601 duration as described by https://en.wikipedia.org/wiki/ISO_8601#Durations" - - - - - - - -.. _ref_flyteidl.admin.FixedRate: - -FixedRate ------------------------------------------------------------------- - -Option for schedules run at a certain frequency e.g. every 2 minutes. - - - -.. csv-table:: FixedRate type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_uint32`", "", "" - "unit", ":ref:`ref_flyteidl.admin.FixedRateUnit`", "", "" - - - - - - - -.. _ref_flyteidl.admin.Schedule: - -Schedule ------------------------------------------------------------------- - -Defines complete set of information required to trigger an execution on a schedule. - - - -.. csv-table:: Schedule type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cron_expression", ":ref:`ref_string`", "", "**Deprecated.** Uses AWS syntax: Minutes Hours Day-of-month Month Day-of-week Year e.g. for a schedule that runs every 15 minutes: 0/15 * * * ? *" - "rate", ":ref:`ref_flyteidl.admin.FixedRate`", "", "" - "cron_schedule", ":ref:`ref_flyteidl.admin.CronSchedule`", "", "" - "kickoff_time_input_arg", ":ref:`ref_string`", "", "Name of the input variable that the kickoff time will be supplied to when the workflow is kicked off." - - - - - - -.. - end messages - - - -.. _ref_flyteidl.admin.FixedRateUnit: - -FixedRateUnit ------------------------------------------------------------------- - -Represents a frequency at which to run a schedule. - -.. 
csv-table:: Enum FixedRateUnit values - :header: "Name", "Number", "Description" - :widths: auto - - "MINUTE", "0", "" - "HOUR", "1", "" - "DAY", "2", "" - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/signal.proto: - -flyteidl/admin/signal.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.Signal: - -Signal ------------------------------------------------------------------- - -Signal encapsulates a unique identifier, associated metadata, and a value for a single Flyte -signal. Signals may exist either without a set value (representing a signal request) or with a -populated value (indicating the signal has been given). - - - -.. csv-table:: Signal type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.SignalIdentifier`", "", "A unique identifier for the requested signal." - "type", ":ref:`ref_flyteidl.core.LiteralType`", "", "A type denoting the required value type for this signal." - "value", ":ref:`ref_flyteidl.core.Literal`", "", "The value of the signal. This is only available if the signal has been "set" and must match the defined type." - - - - - - - -.. _ref_flyteidl.admin.SignalGetOrCreateRequest: - -SignalGetOrCreateRequest ------------------------------------------------------------------- - -SignalGetOrCreateRequest represents a request structure to retrieve or create a signal. -See :ref:`ref_flyteidl.admin.Signal` for more details - - - -.. csv-table:: SignalGetOrCreateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.SignalIdentifier`", "", "A unique identifier for the requested signal." - "type", ":ref:`ref_flyteidl.core.LiteralType`", "", "A type denoting the required value type for this signal." - - - - - - - -.. 
_ref_flyteidl.admin.SignalList: - -SignalList ------------------------------------------------------------------- - -SignalList represents collection of signals along with the token of the last result. -See :ref:`ref_flyteidl.admin.Signal` for more details - - - -.. csv-table:: SignalList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "signals", ":ref:`ref_flyteidl.admin.Signal`", "repeated", "A list of signals matching the input filters." - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.SignalListRequest: - -SignalListRequest ------------------------------------------------------------------- - -SignalListRequest represents a request structure to retrieve a collection of signals. -See :ref:`ref_flyteidl.admin.Signal` for more details - - - -.. csv-table:: SignalListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "workflow_execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Indicates the workflow execution to filter by. +required" - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" - - - - - - - -.. _ref_flyteidl.admin.SignalSetRequest: - -SignalSetRequest ------------------------------------------------------------------- - -SignalSetRequest represents a request structure to set the value on a signal. Setting a signal -effectively satisfies the signal condition within a Flyte workflow. 
-See :ref:`ref_flyteidl.admin.Signal` for more details - - - -.. csv-table:: SignalSetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.SignalIdentifier`", "", "A unique identifier for the requested signal." - "value", ":ref:`ref_flyteidl.core.Literal`", "", "The value of this signal, must match the defining signal type." - - - - - - - -.. _ref_flyteidl.admin.SignalSetResponse: - -SignalSetResponse ------------------------------------------------------------------- - -SignalSetResponse represents a response structure if signal setting succeeds. - -Purposefully empty, may be populated in the future. - - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/task.proto: - -flyteidl/admin/task.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.Task: - -Task ------------------------------------------------------------------- - -Flyte workflows are composed of many ordered tasks. That is small, reusable, self-contained logical blocks -arranged to process workflow inputs and produce a deterministic set of outputs. -Tasks can come in many varieties tuned for specialized behavior. - - - -.. csv-table:: Task type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the task." - "closure", ":ref:`ref_flyteidl.admin.TaskClosure`", "", "closure encapsulates all the fields that maps to a compiled version of the task." - "short_description", ":ref:`ref_string`", "", "One-liner overview of the entity." - - - - - - - -.. _ref_flyteidl.admin.TaskClosure: - -TaskClosure ------------------------------------------------------------------- - -Compute task attributes which include values derived from the TaskSpec, as well as plugin-specific data -and task metadata. 
- - - -.. csv-table:: TaskClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "compiled_task", ":ref:`ref_flyteidl.core.CompiledTask`", "", "Represents the compiled representation of the task from the specification provided." - "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task was created." - - - - - - - -.. _ref_flyteidl.admin.TaskCreateRequest: - -TaskCreateRequest ------------------------------------------------------------------- - -Represents a request structure to create a revision of a task. -See :ref:`ref_flyteidl.admin.Task` for more details - - - -.. csv-table:: TaskCreateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the task. +required" - "spec", ":ref:`ref_flyteidl.admin.TaskSpec`", "", "Represents the specification for task. +required" - - - - - - - -.. _ref_flyteidl.admin.TaskCreateResponse: - -TaskCreateResponse ------------------------------------------------------------------- - -Represents a response structure if task creation succeeds. - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.TaskList: - -TaskList ------------------------------------------------------------------- - -Represents a list of tasks returned from the admin. -See :ref:`ref_flyteidl.admin.Task` for more details - - - -.. csv-table:: TaskList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tasks", ":ref:`ref_flyteidl.admin.Task`", "repeated", "A list of tasks returned based on the request." - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. 
_ref_flyteidl.admin.TaskSpec: - -TaskSpec ------------------------------------------------------------------- - -Represents a structure that encapsulates the user-configured specification of the task. - - - -.. csv-table:: TaskSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "template", ":ref:`ref_flyteidl.core.TaskTemplate`", "", "Template of the task that encapsulates all the metadata of the task." - "description", ":ref:`ref_flyteidl.admin.DescriptionEntity`", "", "Represents the specification for description entity." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/task_execution.proto: - -flyteidl/admin/task_execution.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.TaskExecution: - -TaskExecution ------------------------------------------------------------------- - -Encapsulates all details for a single task execution entity. -A task execution represents an instantiated task, including all inputs and additional -metadata as well as computed results included state, outputs, and duration-based attributes. - - - -.. csv-table:: TaskExecution type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Unique identifier for the task execution." - "input_uri", ":ref:`ref_string`", "", "Path to remote data store where input blob is stored." - "closure", ":ref:`ref_flyteidl.admin.TaskExecutionClosure`", "", "Task execution details and results." - "is_parent", ":ref:`ref_bool`", "", "Whether this task spawned nodes." - - - - - - - -.. _ref_flyteidl.admin.TaskExecutionClosure: - -TaskExecutionClosure ------------------------------------------------------------------- - -Container for task execution details and results. - - - -.. 
csv-table:: TaskExecutionClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "output_uri", ":ref:`ref_string`", "", "**Deprecated.** Path to remote data store where output blob is stored if the execution succeeded (and produced outputs). DEPRECATED. Use GetTaskExecutionData to fetch output data instead." - "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the task execution. Populated if the execution failed." - "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Raw output data produced by this task execution. DEPRECATED. Use GetTaskExecutionData to fetch output data instead." - "phase", ":ref:`ref_flyteidl.core.TaskExecution.Phase`", "", "The last recorded phase for this task execution." - "logs", ":ref:`ref_flyteidl.core.TaskLog`", "repeated", "Detailed log information output by the task execution." - "started_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task execution began running." - "duration", ":ref:`ref_google.protobuf.Duration`", "", "The amount of time the task execution spent running." - "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task execution was created." - "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task execution was last updated." - "custom_info", ":ref:`ref_google.protobuf.Struct`", "", "Custom data specific to the task plugin." - "reason", ":ref:`ref_string`", "", "If there is an explanation for the most recent phase transition, the reason will capture it." - "task_type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier." - "metadata", ":ref:`ref_flyteidl.event.TaskExecutionMetadata`", "", "Metadata around how a task was executed." - "event_version", ":ref:`ref_int32`", "", "The event version is used to indicate versioned changes in how data is maintained using this proto message. 
For example, event_version > 0 means that map tasks logs use the TaskExecutionMetadata ExternalResourceInfo fields for each subtask rather than the TaskLog in this message." - - - - - - - -.. _ref_flyteidl.admin.TaskExecutionGetDataRequest: - -TaskExecutionGetDataRequest ------------------------------------------------------------------- - -Request structure to fetch inputs and output for a task execution. -By default this data is not returned inline in :ref:`ref_flyteidl.admin.TaskExecutionGetRequest` - - - -.. csv-table:: TaskExecutionGetDataRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "The identifier of the task execution for which to fetch inputs and outputs. +required" - - - - - - - -.. _ref_flyteidl.admin.TaskExecutionGetDataResponse: - -TaskExecutionGetDataResponse ------------------------------------------------------------------- - -Response structure for TaskExecutionGetDataRequest which contains inputs and outputs for a task execution. - - - -.. csv-table:: TaskExecutionGetDataResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "inputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of task execution inputs. Deprecated: Please use full_inputs instead." - "outputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of task execution outputs. Deprecated: Please use full_outputs instead." - "full_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_inputs will only be populated if they are under a configured size threshold." - "full_outputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_outputs will only be populated if they are under a configured size threshold." - - - - - - - -.. 
_ref_flyteidl.admin.TaskExecutionGetRequest: - -TaskExecutionGetRequest ------------------------------------------------------------------- - -A message used to fetch a single task execution entity. -See :ref:`ref_flyteidl.admin.TaskExecution` for more details - - - -.. csv-table:: TaskExecutionGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Unique identifier for the task execution. +required" - - - - - - - -.. _ref_flyteidl.admin.TaskExecutionList: - -TaskExecutionList ------------------------------------------------------------------- - -Response structure for a query to list of task execution entities. -See :ref:`ref_flyteidl.admin.TaskExecution` for more details - - - -.. csv-table:: TaskExecutionList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "task_executions", ":ref:`ref_flyteidl.admin.TaskExecution`", "repeated", "" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.TaskExecutionListRequest: - -TaskExecutionListRequest ------------------------------------------------------------------- - -Represents a request structure to retrieve a list of task execution entities yielded by a specific node execution. -See :ref:`ref_flyteidl.admin.TaskExecution` for more details - - - -.. csv-table:: TaskExecutionListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Indicates the node execution to filter by. +required" - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. 
+required" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering for returned list. +optional" - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/version.proto: - -flyteidl/admin/version.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.GetVersionRequest: - -GetVersionRequest ------------------------------------------------------------------- - -Empty request for GetVersion - - - - - - - - -.. _ref_flyteidl.admin.GetVersionResponse: - -GetVersionResponse ------------------------------------------------------------------- - -Response for the GetVersion API - - - -.. csv-table:: GetVersionResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "control_plane_version", ":ref:`ref_flyteidl.admin.Version`", "", "The control plane version information. FlyteAdmin and related components form the control plane of Flyte" - - - - - - - -.. _ref_flyteidl.admin.Version: - -Version ------------------------------------------------------------------- - -Provides Version information for a component - - - -.. csv-table:: Version type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "Build", ":ref:`ref_string`", "", "Specifies the GIT sha of the build" - "Version", ":ref:`ref_string`", "", "Version for the build, should follow a semver" - "BuildTime", ":ref:`ref_string`", "", "Build timestamp" - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. 
_ref_flyteidl/admin/workflow.proto: - -flyteidl/admin/workflow.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.CreateWorkflowFailureReason: - -CreateWorkflowFailureReason ------------------------------------------------------------------- - -When a CreateWorkflowRequest fails due to matching id - - - -.. csv-table:: CreateWorkflowFailureReason type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "exists_different_structure", ":ref:`ref_flyteidl.admin.WorkflowErrorExistsDifferentStructure`", "", "" - "exists_identical_structure", ":ref:`ref_flyteidl.admin.WorkflowErrorExistsIdenticalStructure`", "", "" - - - - - - - -.. _ref_flyteidl.admin.Workflow: - -Workflow ------------------------------------------------------------------- - -Represents the workflow structure stored in the Admin -A workflow is created by ordering tasks and associating outputs to inputs -in order to produce a directed-acyclic execution graph. - - - -.. csv-table:: Workflow type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow." - "closure", ":ref:`ref_flyteidl.admin.WorkflowClosure`", "", "closure encapsulates all the fields that maps to a compiled version of the workflow." - "short_description", ":ref:`ref_string`", "", "One-liner overview of the entity." - - - - - - - -.. _ref_flyteidl.admin.WorkflowClosure: - -WorkflowClosure ------------------------------------------------------------------- - -A container holding the compiled workflow produced from the WorkflowSpec and additional metadata. - - - -.. csv-table:: WorkflowClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "compiled_workflow", ":ref:`ref_flyteidl.core.CompiledWorkflowClosure`", "", "Represents the compiled representation of the workflow from the specification provided." 
- "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the workflow was created." - - - - - - - -.. _ref_flyteidl.admin.WorkflowCreateRequest: - -WorkflowCreateRequest ------------------------------------------------------------------- - -Represents a request structure to create a revision of a workflow. -See :ref:`ref_flyteidl.admin.Workflow` for more details - - - -.. csv-table:: WorkflowCreateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow. +required" - "spec", ":ref:`ref_flyteidl.admin.WorkflowSpec`", "", "Represents the specification for workflow. +required" - - - - - - - -.. _ref_flyteidl.admin.WorkflowCreateResponse: - -WorkflowCreateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.WorkflowErrorExistsDifferentStructure: - -WorkflowErrorExistsDifferentStructure ------------------------------------------------------------------- - -The workflow id is already used and the structure is different - - - -.. csv-table:: WorkflowErrorExistsDifferentStructure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "" - - - - - - - -.. _ref_flyteidl.admin.WorkflowErrorExistsIdenticalStructure: - -WorkflowErrorExistsIdenticalStructure ------------------------------------------------------------------- - -The workflow id is already used with an identical sctructure - - - -.. csv-table:: WorkflowErrorExistsIdenticalStructure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "" - - - - - - - -.. 
_ref_flyteidl.admin.WorkflowList: - -WorkflowList ------------------------------------------------------------------- - -Represents a list of workflows returned from the admin. -See :ref:`ref_flyteidl.admin.Workflow` for more details - - - -.. csv-table:: WorkflowList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "workflows", ":ref:`ref_flyteidl.admin.Workflow`", "repeated", "A list of workflows returned based on the request." - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.WorkflowSpec: - -WorkflowSpec ------------------------------------------------------------------- - -Represents a structure that encapsulates the specification of the workflow. - - - -.. csv-table:: WorkflowSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "template", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "", "Template of the task that encapsulates all the metadata of the workflow." - "sub_workflows", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "repeated", "Workflows that are embedded into other workflows need to be passed alongside the parent workflow to the propeller compiler (since the compiler doesn't have any knowledge of other workflows - ie, it doesn't reach out to Admin to see other registered workflows). In fact, subworkflows do not even need to be registered." - "description", ":ref:`ref_flyteidl.admin.DescriptionEntity`", "", "Represents the specification for description entity." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/admin/workflow_attributes.proto: - -flyteidl/admin/workflow_attributes.proto -================================================================== - - - - - -.. 
_ref_flyteidl.admin.WorkflowAttributes: - -WorkflowAttributes ------------------------------------------------------------------- - -Defines a set of custom matching attributes which defines resource defaults for a project, domain and workflow. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: WorkflowAttributes type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id for which this set of attributes will be applied." - "domain", ":ref:`ref_string`", "", "Unique domain id for which this set of attributes will be applied." - "workflow", ":ref:`ref_string`", "", "Workflow name for which this set of attributes will be applied." - "matching_attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", "" - - - - - - - -.. _ref_flyteidl.admin.WorkflowAttributesDeleteRequest: - -WorkflowAttributesDeleteRequest ------------------------------------------------------------------- - -Request to delete a set matchable workflow attribute override. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: WorkflowAttributesDeleteRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" - "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required" - "workflow", ":ref:`ref_string`", "", "Workflow name which this set of attributes references. +required" - "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to delete. +required" - - - - - - - -.. 
_ref_flyteidl.admin.WorkflowAttributesDeleteResponse: - -WorkflowAttributesDeleteResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.WorkflowAttributesGetRequest: - -WorkflowAttributesGetRequest ------------------------------------------------------------------- - -Request to get an individual workflow attribute override. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: WorkflowAttributesGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" - "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required" - "workflow", ":ref:`ref_string`", "", "Workflow name which this set of attributes references. +required" - "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to return. +required" - - - - - - - -.. _ref_flyteidl.admin.WorkflowAttributesGetResponse: - -WorkflowAttributesGetResponse ------------------------------------------------------------------- - -Response to get an individual workflow attribute override. - - - -.. csv-table:: WorkflowAttributesGetResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.WorkflowAttributes`", "", "" - - - - - - - -.. _ref_flyteidl.admin.WorkflowAttributesUpdateRequest: - -WorkflowAttributesUpdateRequest ------------------------------------------------------------------- - -Sets custom attributes for a project, domain and workflow combination. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. 
csv-table:: WorkflowAttributesUpdateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.WorkflowAttributes`", "", "" - - - - - - - -.. _ref_flyteidl.admin.WorkflowAttributesUpdateResponse: - -WorkflowAttributesUpdateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_google/protobuf/duration.proto: - -google/protobuf/duration.proto -================================================================== - - - - - -.. _ref_google.protobuf.Duration: - -Duration ------------------------------------------------------------------- - -A Duration represents a signed, fixed-length span of time represented -as a count of seconds and fractions of seconds at nanosecond -resolution. It is independent of any calendar and concepts like "day" -or "month". It is related to Timestamp in that the difference between -two Timestamp values is a Duration and it can be added or subtracted -from a Timestamp. Range is approximately +-10,000 years. - -# Examples - -Example 1: Compute Duration from two Timestamps in pseudo code. - - Timestamp start = ...; - Timestamp end = ...; - Duration duration = ...; - - duration.seconds = end.seconds - start.seconds; - duration.nanos = end.nanos - start.nanos; - - if (duration.seconds < 0 && duration.nanos > 0) { - duration.seconds += 1; - duration.nanos -= 1000000000; - } else if (duration.seconds > 0 && duration.nanos < 0) { - duration.seconds -= 1; - duration.nanos += 1000000000; - } - -Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. 
- - Timestamp start = ...; - Duration duration = ...; - Timestamp end = ...; - - end.seconds = start.seconds + duration.seconds; - end.nanos = start.nanos + duration.nanos; - - if (end.nanos < 0) { - end.seconds -= 1; - end.nanos += 1000000000; - } else if (end.nanos >= 1000000000) { - end.seconds += 1; - end.nanos -= 1000000000; - } - -Example 3: Compute Duration from datetime.timedelta in Python. - - td = datetime.timedelta(days=3, minutes=10) - duration = Duration() - duration.FromTimedelta(td) - -# JSON Mapping - -In JSON format, the Duration type is encoded as a string rather than an -object, where the string ends in the suffix "s" (indicating seconds) and -is preceded by the number of seconds, with nanoseconds expressed as -fractional seconds. For example, 3 seconds with 0 nanoseconds should be -encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -microsecond should be expressed in JSON format as "3.000001s". - - - -.. csv-table:: Duration type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years" - "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. 
_ref_google/protobuf/wrappers.proto: - -google/protobuf/wrappers.proto -================================================================== - - - - - -.. _ref_google.protobuf.BoolValue: - -BoolValue ------------------------------------------------------------------- - -Wrapper message for `bool`. - -The JSON representation for `BoolValue` is JSON `true` and `false`. - - - -.. csv-table:: BoolValue type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_bool`", "", "The bool value." - - - - - - - -.. _ref_google.protobuf.BytesValue: - -BytesValue ------------------------------------------------------------------- - -Wrapper message for `bytes`. - -The JSON representation for `BytesValue` is JSON string. - - - -.. csv-table:: BytesValue type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_bytes`", "", "The bytes value." - - - - - - - -.. _ref_google.protobuf.DoubleValue: - -DoubleValue ------------------------------------------------------------------- - -Wrapper message for `double`. - -The JSON representation for `DoubleValue` is JSON number. - - - -.. csv-table:: DoubleValue type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_double`", "", "The double value." - - - - - - - -.. _ref_google.protobuf.FloatValue: - -FloatValue ------------------------------------------------------------------- - -Wrapper message for `float`. - -The JSON representation for `FloatValue` is JSON number. - - - -.. csv-table:: FloatValue type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_float`", "", "The float value." - - - - - - - -.. _ref_google.protobuf.Int32Value: - -Int32Value ------------------------------------------------------------------- - -Wrapper message for `int32`. - -The JSON representation for `Int32Value` is JSON number. - - - -.. 
csv-table:: Int32Value type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_int32`", "", "The int32 value." - - - - - - - -.. _ref_google.protobuf.Int64Value: - -Int64Value ------------------------------------------------------------------- - -Wrapper message for `int64`. - -The JSON representation for `Int64Value` is JSON string. - - - -.. csv-table:: Int64Value type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_int64`", "", "The int64 value." - - - - - - - -.. _ref_google.protobuf.StringValue: - -StringValue ------------------------------------------------------------------- - -Wrapper message for `string`. - -The JSON representation for `StringValue` is JSON string. - - - -.. csv-table:: StringValue type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_string`", "", "The string value." - - - - - - - -.. _ref_google.protobuf.UInt32Value: - -UInt32Value ------------------------------------------------------------------- - -Wrapper message for `uint32`. - -The JSON representation for `UInt32Value` is JSON number. - - - -.. csv-table:: UInt32Value type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_uint32`", "", "The uint32 value." - - - - - - - -.. _ref_google.protobuf.UInt64Value: - -UInt64Value ------------------------------------------------------------------- - -Wrapper message for `uint64`. - -The JSON representation for `UInt64Value` is JSON string. - - - -.. csv-table:: UInt64Value type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_uint64`", "", "The uint64 value." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. 
- end services - - diff --git a/docs/api/flyteidl/docs/admin/index.rst b/docs/api/flyteidl/docs/admin/index.rst deleted file mode 100644 index 6518e82dbb..0000000000 --- a/docs/api/flyteidl/docs/admin/index.rst +++ /dev/null @@ -1,13 +0,0 @@ -Flyte Admin Service entities -============================ - -These are the control plane entities that can be used to communicate with the -FlyteAdmin service over gRPC or REST. The endpoint specification is defined in the -`Admin raw protos `__ - -.. toctree:: - :maxdepth: 1 - :caption: admin - :name: admintoc - - admin diff --git a/docs/api/flyteidl/docs/contributing.md b/docs/api/flyteidl/docs/contributing.md deleted file mode 100644 index 67685f45b7..0000000000 --- a/docs/api/flyteidl/docs/contributing.md +++ /dev/null @@ -1,79 +0,0 @@ -# Flyteidl - -This is one of the core repositories of Flyte. It contains the Specification of the Flyte Language using protobuf messages, the Backend API specification in gRPC, and Swagger REST. The repo contains the generated clients and protocol message structures in multiple languages. Along with the generated code, the repository also contains the Golang clients for Flyte's backend APIs (the services grouped under FlyteAdmin). - - -[![Slack](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://slack.flyte.org) - -* [flyte.org](https://flyte.org) -* [Flyte Docs](http://docs.flyte.org) -* [Flyteidl API reference documentation](https://docs.flyte.org/en/latest/reference_flyteidl.html) - -## Contributing to Flyteidl - -## Tooling for Flyteidl - -1. Run ``make download_tooling`` to install generator dependencies. - -```bash - make download_tooling -``` - -2. Ensure Docker is installed locally. -3. Run ``make generate`` to generate all the code, mock client, and docs for FlyteAdmin Service. - -```bash - make generate -``` - -4. 
To add new dependencies for documentation generation, modify ``doc-requirements.in`` and run - -```bash - make doc-requirements.txt -``` - -## Docs structure - -The index.rst files for protos are arranged in parallel under the ``docs`` folder. -All the proto definitions are within ``protos/flyteidl`` and their corresponding docs are in ``protos/docs``. - -``` -docs -├── admin -│   ├── admin.rst -│   └── index.rst -├── core -│   ├── core.rst -│   └── index.rst -├── datacatalog -│   ├── datacatalog.rst -│   └── index.rst -├── event -│   ├── event.rst -│   └── index.rst -├── plugins -│   ├── index.rst -│   └── plugins.rst -├── service -│   ├── index.rst -│   └── service.rst -``` - -Each module in protos has a module in docs with the same name. -For example: ``protos/flyteidl/core`` has a module ``protos/docs/core`` under the ``docs`` folder which has the corresponding index and documentation files. - - -## Generating Documentation - -* If a new module is to be introduced, follow the structure for core files in `generate_protos.sh` file which helps generate the core documentation from its proto files. -``` - core_proto_files=`ls protos/flyteidl/core/*.proto |xargs` - # Remove any currently generated file - ls -d protos/docs/core/* | grep -v index.rst | xargs rm - protoc --doc_out=protos/docs/core --doc_opt=restructuredtext,core.rst -I=protos `echo $core_proto_files` -``` - -* ``make generate`` generates the modified rst files. - -* ``make html`` generates the Sphinx documentation from the docs folder that uses the modified rst files. - diff --git a/docs/api/flyteidl/docs/core/core.rst b/docs/api/flyteidl/docs/core/core.rst deleted file mode 100644 index dd3cf71341..0000000000 --- a/docs/api/flyteidl/docs/core/core.rst +++ /dev/null @@ -1,3952 +0,0 @@ -###################### -Protocol Documentation -###################### - - - - -.. 
_ref_flyteidl/core/catalog.proto: - -flyteidl/core/catalog.proto -================================================================== - - - - - -.. _ref_flyteidl.core.CatalogArtifactTag: - -CatalogArtifactTag ------------------------------------------------------------------- - - - - - -.. csv-table:: CatalogArtifactTag type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "artifact_id", ":ref:`ref_string`", "", "Artifact ID is generated name" - "name", ":ref:`ref_string`", "", "Flyte computes the tag automatically, as the hash of the values" - - - - - - - -.. _ref_flyteidl.core.CatalogMetadata: - -CatalogMetadata ------------------------------------------------------------------- - -Catalog artifact information with specific metadata - - - -.. csv-table:: CatalogMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset_id", ":ref:`ref_flyteidl.core.Identifier`", "", "Dataset ID in the catalog" - "artifact_tag", ":ref:`ref_flyteidl.core.CatalogArtifactTag`", "", "Artifact tag in the catalog" - "source_task_execution", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Today we only support TaskExecutionIdentifier as a source, as catalog caching only works for task executions" - - - - - - - -.. _ref_flyteidl.core.CatalogReservation: - -CatalogReservation ------------------------------------------------------------------- - - - - - - - - - -.. - end messages - - - -.. _ref_flyteidl.core.CatalogCacheStatus: - -CatalogCacheStatus ------------------------------------------------------------------- - -Indicates the status of CatalogCaching. The reason why this is not embedded in TaskNodeMetadata is, that we may use for other types of nodes as well in the future - -.. 
csv-table:: Enum CatalogCacheStatus values - :header: "Name", "Number", "Description" - :widths: auto - - "CACHE_DISABLED", "0", "Used to indicate that caching was disabled" - "CACHE_MISS", "1", "Used to indicate that the cache lookup resulted in no matches" - "CACHE_HIT", "2", "used to indicate that the associated artifact was a result of a previous execution" - "CACHE_POPULATED", "3", "used to indicate that the resultant artifact was added to the cache" - "CACHE_LOOKUP_FAILURE", "4", "Used to indicate that cache lookup failed because of an error" - "CACHE_PUT_FAILURE", "5", "Used to indicate that cache lookup failed because of an error" - "CACHE_SKIPPED", "6", "Used to indicate the cache lookup was skipped" - - - -.. _ref_flyteidl.core.CatalogReservation.Status: - -CatalogReservation.Status ------------------------------------------------------------------- - -Indicates the status of a catalog reservation operation. - -.. csv-table:: Enum CatalogReservation.Status values - :header: "Name", "Number", "Description" - :widths: auto - - "RESERVATION_DISABLED", "0", "Used to indicate that reservations are disabled" - "RESERVATION_ACQUIRED", "1", "Used to indicate that a reservation was successfully acquired or extended" - "RESERVATION_EXISTS", "2", "Used to indicate that an active reservation currently exists" - "RESERVATION_RELEASED", "3", "Used to indicate that the reservation has been successfully released" - "RESERVATION_FAILURE", "4", "Used to indicate that a reservation operation resulted in failure" - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/core/compiler.proto: - -flyteidl/core/compiler.proto -================================================================== - - - - - -.. _ref_flyteidl.core.CompiledTask: - -CompiledTask ------------------------------------------------------------------- - -Output of the Compilation step. This object represent one Task. We store more metadata at this layer - - - -.. 
csv-table:: CompiledTask type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "template", ":ref:`ref_flyteidl.core.TaskTemplate`", "", "Completely contained TaskTemplate" - - - - - - - -.. _ref_flyteidl.core.CompiledWorkflow: - -CompiledWorkflow ------------------------------------------------------------------- - -Output of the compilation Step. This object represents one workflow. We store more metadata at this layer - - - -.. csv-table:: CompiledWorkflow type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "template", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "", "Completely contained Workflow Template" - "connections", ":ref:`ref_flyteidl.core.ConnectionSet`", "", "For internal use only! This field is used by the system and must not be filled in. Any values set will be ignored." - - - - - - - -.. _ref_flyteidl.core.CompiledWorkflowClosure: - -CompiledWorkflowClosure ------------------------------------------------------------------- - -A Compiled Workflow Closure contains all the information required to start a new execution, or to visualize a workflow -and its details. The CompiledWorkflowClosure should always contain a primary workflow, that is the main workflow that -will being the execution. All subworkflows are denormalized. WorkflowNodes refer to the workflow identifiers of -compiled subworkflows. - - - -.. csv-table:: CompiledWorkflowClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "primary", ":ref:`ref_flyteidl.core.CompiledWorkflow`", "", "+required" - "sub_workflows", ":ref:`ref_flyteidl.core.CompiledWorkflow`", "repeated", "Guaranteed that there will only exist one and only one workflow with a given id, i.e., every sub workflow has a unique identifier. 
Also every enclosed subworkflow is used either by a primary workflow or by a subworkflow as an inlined workflow +optional" - "tasks", ":ref:`ref_flyteidl.core.CompiledTask`", "repeated", "Guaranteed that there will only exist one and only one task with a given id, i.e., every task has a unique id +required (at least 1)" - - - - - - - -.. _ref_flyteidl.core.ConnectionSet: - -ConnectionSet ------------------------------------------------------------------- - -Adjacency list for the workflow. This is created as part of the compilation process. Every process after the compilation -step uses this created ConnectionSet - - - -.. csv-table:: ConnectionSet type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "downstream", ":ref:`ref_flyteidl.core.ConnectionSet.DownstreamEntry`", "repeated", "A list of all the node ids that are downstream from a given node id" - "upstream", ":ref:`ref_flyteidl.core.ConnectionSet.UpstreamEntry`", "repeated", "A list of all the node ids, that are upstream of this node id" - - - - - - - -.. _ref_flyteidl.core.ConnectionSet.DownstreamEntry: - -ConnectionSet.DownstreamEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: ConnectionSet.DownstreamEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.ConnectionSet.IdList`", "", "" - - - - - - - -.. _ref_flyteidl.core.ConnectionSet.IdList: - -ConnectionSet.IdList ------------------------------------------------------------------- - - - - - -.. csv-table:: ConnectionSet.IdList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "ids", ":ref:`ref_string`", "repeated", "" - - - - - - - -.. _ref_flyteidl.core.ConnectionSet.UpstreamEntry: - -ConnectionSet.UpstreamEntry ------------------------------------------------------------------- - - - - - -.. 
csv-table:: ConnectionSet.UpstreamEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.ConnectionSet.IdList`", "", "" - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/core/condition.proto: - -flyteidl/core/condition.proto -================================================================== - - - - - -.. _ref_flyteidl.core.BooleanExpression: - -BooleanExpression ------------------------------------------------------------------- - -Defines a boolean expression tree. It can be a simple or a conjunction expression. -Multiple expressions can be combined using a conjunction or a disjunction to result in a final boolean result. - - - -.. csv-table:: BooleanExpression type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "conjunction", ":ref:`ref_flyteidl.core.ConjunctionExpression`", "", "" - "comparison", ":ref:`ref_flyteidl.core.ComparisonExpression`", "", "" - - - - - - - -.. _ref_flyteidl.core.ComparisonExpression: - -ComparisonExpression ------------------------------------------------------------------- - -Defines a 2-level tree where the root is a comparison operator and Operands are primitives or known variables. -Each expression results in a boolean result. - - - -.. csv-table:: ComparisonExpression type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "operator", ":ref:`ref_flyteidl.core.ComparisonExpression.Operator`", "", "" - "left_value", ":ref:`ref_flyteidl.core.Operand`", "", "" - "right_value", ":ref:`ref_flyteidl.core.Operand`", "", "" - - - - - - - -.. _ref_flyteidl.core.ConjunctionExpression: - -ConjunctionExpression ------------------------------------------------------------------- - -Defines a conjunction expression of two boolean expressions. - - - -.. 
csv-table:: ConjunctionExpression type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "operator", ":ref:`ref_flyteidl.core.ConjunctionExpression.LogicalOperator`", "", "" - "left_expression", ":ref:`ref_flyteidl.core.BooleanExpression`", "", "" - "right_expression", ":ref:`ref_flyteidl.core.BooleanExpression`", "", "" - - - - - - - -.. _ref_flyteidl.core.Operand: - -Operand ------------------------------------------------------------------- - -Defines an operand to a comparison expression. - - - -.. csv-table:: Operand type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "primitive", ":ref:`ref_flyteidl.core.Primitive`", "", "Can be a constant" - "var", ":ref:`ref_string`", "", "Or one of this node's input variables" - - - - - - -.. - end messages - - - -.. _ref_flyteidl.core.ComparisonExpression.Operator: - -ComparisonExpression.Operator ------------------------------------------------------------------- - -Binary Operator for each expression - -.. csv-table:: Enum ComparisonExpression.Operator values - :header: "Name", "Number", "Description" - :widths: auto - - "EQ", "0", "" - "NEQ", "1", "" - "GT", "2", "Greater Than" - "GTE", "3", "" - "LT", "4", "Less Than" - "LTE", "5", "" - - - -.. _ref_flyteidl.core.ConjunctionExpression.LogicalOperator: - -ConjunctionExpression.LogicalOperator ------------------------------------------------------------------- - -Nested conditions. They can be conjoined using AND / OR -Order of evaluation is not important as the operators are Commutative - -.. csv-table:: Enum ConjunctionExpression.LogicalOperator values - :header: "Name", "Number", "Description" - :widths: auto - - "AND", "0", "Conjunction" - "OR", "1", "" - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/core/dynamic_job.proto: - -flyteidl/core/dynamic_job.proto -================================================================== - - - - - -.. 
_ref_flyteidl.core.DynamicJobSpec: - -DynamicJobSpec ------------------------------------------------------------------- - -Describes a set of tasks to execute and how the final outputs are produced. - - - -.. csv-table:: DynamicJobSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "nodes", ":ref:`ref_flyteidl.core.Node`", "repeated", "A collection of nodes to execute." - "min_successes", ":ref:`ref_int64`", "", "An absolute number of successful completions of nodes required to mark this job as succeeded. As soon as this criteria is met, the dynamic job will be marked as successful and outputs will be computed. If this number becomes impossible to reach (e.g. number of currently running tasks + number of already succeeded tasks < min_successes) the task will be aborted immediately and marked as failed. The default value of this field, if not specified, is the count of nodes repeated field." - "outputs", ":ref:`ref_flyteidl.core.Binding`", "repeated", "Describes how to bind the final output of the dynamic job from the outputs of executed nodes. The referenced ids in bindings should have the generated id for the subtask." - "tasks", ":ref:`ref_flyteidl.core.TaskTemplate`", "repeated", "[Optional] A complete list of task specs referenced in nodes." - "subworkflows", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "repeated", "[Optional] A complete list of sub-workflow specs referenced in nodes." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/core/errors.proto: - -flyteidl/core/errors.proto -================================================================== - - - - - -.. _ref_flyteidl.core.ContainerError: - -ContainerError ------------------------------------------------------------------- - -Error message to propagate detailed errors from container executions to the execution -engine. - - - -.. 
csv-table:: ContainerError type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "code", ":ref:`ref_string`", "", "A simplified code for errors, so that we can provide a glossary of all possible errors." - "message", ":ref:`ref_string`", "", "A detailed error message." - "kind", ":ref:`ref_flyteidl.core.ContainerError.Kind`", "", "An abstract error kind for this error. Defaults to Non_Recoverable if not specified." - "origin", ":ref:`ref_flyteidl.core.ExecutionError.ErrorKind`", "", "Defines the origin of the error (system, user, unknown)." - - - - - - - -.. _ref_flyteidl.core.ErrorDocument: - -ErrorDocument ------------------------------------------------------------------- - -Defines the errors.pb file format the container can produce to communicate -failure reasons to the execution engine. - - - -.. csv-table:: ErrorDocument type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "error", ":ref:`ref_flyteidl.core.ContainerError`", "", "The error raised during execution." - - - - - - -.. - end messages - - - -.. _ref_flyteidl.core.ContainerError.Kind: - -ContainerError.Kind ------------------------------------------------------------------- - -Defines a generic error type that dictates the behavior of the retry strategy. - -.. csv-table:: Enum ContainerError.Kind values - :header: "Name", "Number", "Description" - :widths: auto - - "NON_RECOVERABLE", "0", "" - "RECOVERABLE", "1", "" - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/core/execution.proto: - -flyteidl/core/execution.proto -================================================================== - - - - - -.. _ref_flyteidl.core.ExecutionError: - -ExecutionError ------------------------------------------------------------------- - -Represents the error message from the execution. - - - -.. 
csv-table:: ExecutionError type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "code", ":ref:`ref_string`", "", "Error code indicates a grouping of a type of error. More Info: " - "message", ":ref:`ref_string`", "", "Detailed description of the error - including stack trace." - "error_uri", ":ref:`ref_string`", "", "Full error contents accessible via a URI" - "kind", ":ref:`ref_flyteidl.core.ExecutionError.ErrorKind`", "", "" - - - - - - - -.. _ref_flyteidl.core.NodeExecution: - -NodeExecution ------------------------------------------------------------------- - -Indicates various phases of Node Execution that only include the time spent to run the nodes/workflows - - - - - - - - -.. _ref_flyteidl.core.QualityOfService: - -QualityOfService ------------------------------------------------------------------- - -Indicates the priority of an execution. - - - -.. csv-table:: QualityOfService type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tier", ":ref:`ref_flyteidl.core.QualityOfService.Tier`", "", "" - "spec", ":ref:`ref_flyteidl.core.QualityOfServiceSpec`", "", "" - - - - - - - -.. _ref_flyteidl.core.QualityOfServiceSpec: - -QualityOfServiceSpec ------------------------------------------------------------------- - -Represents customized execution run-time attributes. - - - -.. csv-table:: QualityOfServiceSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "queueing_budget", ":ref:`ref_google.protobuf.Duration`", "", "Indicates how much queueing delay an execution can tolerate." - - - - - - - -.. _ref_flyteidl.core.TaskExecution: - -TaskExecution ------------------------------------------------------------------- - -Phases that task plugins can go through. Not all phases may be applicable to a specific plugin task, -but this is the cumulative list that customers may want to know about for their task. - - - - - - - - -.. 
_ref_flyteidl.core.TaskLog: - -TaskLog ------------------------------------------------------------------- - -Log information for the task that is specific to a log sink -When our log story is flushed out, we may have more metadata here like log link expiry - - - -.. csv-table:: TaskLog type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "uri", ":ref:`ref_string`", "", "" - "name", ":ref:`ref_string`", "", "" - "message_format", ":ref:`ref_flyteidl.core.TaskLog.MessageFormat`", "", "" - "ttl", ":ref:`ref_google.protobuf.Duration`", "", "" - - - - - - - -.. _ref_flyteidl.core.WorkflowExecution: - -WorkflowExecution ------------------------------------------------------------------- - -Indicates various phases of Workflow Execution - - - - - - - -.. - end messages - - - -.. _ref_flyteidl.core.ExecutionError.ErrorKind: - -ExecutionError.ErrorKind ------------------------------------------------------------------- - -Error type: System or User - -.. csv-table:: Enum ExecutionError.ErrorKind values - :header: "Name", "Number", "Description" - :widths: auto - - "UNKNOWN", "0", "" - "USER", "1", "" - "SYSTEM", "2", "" - - - -.. _ref_flyteidl.core.NodeExecution.Phase: - -NodeExecution.Phase ------------------------------------------------------------------- - - - -.. csv-table:: Enum NodeExecution.Phase values - :header: "Name", "Number", "Description" - :widths: auto - - "UNDEFINED", "0", "" - "QUEUED", "1", "" - "RUNNING", "2", "" - "SUCCEEDED", "3", "" - "FAILING", "4", "" - "FAILED", "5", "" - "ABORTED", "6", "" - "SKIPPED", "7", "" - "TIMED_OUT", "8", "" - "DYNAMIC_RUNNING", "9", "" - "RECOVERED", "10", "" - - - -.. _ref_flyteidl.core.QualityOfService.Tier: - -QualityOfService.Tier ------------------------------------------------------------------- - - - -.. csv-table:: Enum QualityOfService.Tier values - :header: "Name", "Number", "Description" - :widths: auto - - "UNDEFINED", "0", "Default: no quality of service specified." 
- "HIGH", "1", "" - "MEDIUM", "2", "" - "LOW", "3", "" - - - -.. _ref_flyteidl.core.TaskExecution.Phase: - -TaskExecution.Phase ------------------------------------------------------------------- - - - -.. csv-table:: Enum TaskExecution.Phase values - :header: "Name", "Number", "Description" - :widths: auto - - "UNDEFINED", "0", "" - "QUEUED", "1", "" - "RUNNING", "2", "" - "SUCCEEDED", "3", "" - "ABORTED", "4", "" - "FAILED", "5", "" - "INITIALIZING", "6", "To indicate cases where task is initializing, like: ErrImagePull, ContainerCreating, PodInitializing" - "WAITING_FOR_RESOURCES", "7", "To address cases, where underlying resource is not available: Backoff error, Resource quota exceeded" - - - -.. _ref_flyteidl.core.TaskLog.MessageFormat: - -TaskLog.MessageFormat ------------------------------------------------------------------- - - - -.. csv-table:: Enum TaskLog.MessageFormat values - :header: "Name", "Number", "Description" - :widths: auto - - "UNKNOWN", "0", "" - "CSV", "1", "" - "JSON", "2", "" - - - -.. _ref_flyteidl.core.WorkflowExecution.Phase: - -WorkflowExecution.Phase ------------------------------------------------------------------- - - - -.. csv-table:: Enum WorkflowExecution.Phase values - :header: "Name", "Number", "Description" - :widths: auto - - "UNDEFINED", "0", "" - "QUEUED", "1", "" - "RUNNING", "2", "" - "SUCCEEDING", "3", "" - "SUCCEEDED", "4", "" - "FAILING", "5", "" - "FAILED", "6", "" - "ABORTED", "7", "" - "TIMED_OUT", "8", "" - "ABORTING", "9", "" - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/core/identifier.proto: - -flyteidl/core/identifier.proto -================================================================== - - - - - -.. _ref_flyteidl.core.Identifier: - -Identifier ------------------------------------------------------------------- - -Encapsulation of fields that uniquely identifies a Flyte resource. - - - -.. 
csv-table:: Identifier type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Identifies the specific type of resource that this identifier corresponds to." - "project", ":ref:`ref_string`", "", "Name of the project the resource belongs to." - "domain", ":ref:`ref_string`", "", "Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project." - "name", ":ref:`ref_string`", "", "User provided value for the resource." - "version", ":ref:`ref_string`", "", "Specific version of the resource." - - - - - - - -.. _ref_flyteidl.core.NodeExecutionIdentifier: - -NodeExecutionIdentifier ------------------------------------------------------------------- - -Encapsulation of fields that identify a Flyte node execution entity. - - - -.. csv-table:: NodeExecutionIdentifier type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "node_id", ":ref:`ref_string`", "", "" - "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "" - - - - - - - -.. _ref_flyteidl.core.SignalIdentifier: - -SignalIdentifier ------------------------------------------------------------------- - -Encapsulation of fields that uniquely identify a signal. - - - -.. csv-table:: SignalIdentifier type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "signal_id", ":ref:`ref_string`", "", "Unique identifier for a signal." - "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifies the Flyte workflow execution this signal belongs to." - - - - - - - -.. _ref_flyteidl.core.TaskExecutionIdentifier: - -TaskExecutionIdentifier ------------------------------------------------------------------- - -Encapsulation of fields that identify a Flyte task execution entity. - - - -.. 
csv-table:: TaskExecutionIdentifier type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "task_id", ":ref:`ref_flyteidl.core.Identifier`", "", "" - "node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "" - "retry_attempt", ":ref:`ref_uint32`", "", "" - - - - - - - -.. _ref_flyteidl.core.WorkflowExecutionIdentifier: - -WorkflowExecutionIdentifier ------------------------------------------------------------------- - -Encapsulation of fields that uniquely identifies a Flyte workflow execution - - - -.. csv-table:: WorkflowExecutionIdentifier type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Name of the project the resource belongs to." - "domain", ":ref:`ref_string`", "", "Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project." - "name", ":ref:`ref_string`", "", "User or system provided value for the resource." - - - - - - -.. - end messages - - - -.. _ref_flyteidl.core.ResourceType: - -ResourceType ------------------------------------------------------------------- - -Indicates a resource type within Flyte. - -.. csv-table:: Enum ResourceType values - :header: "Name", "Number", "Description" - :widths: auto - - "UNSPECIFIED", "0", "" - "TASK", "1", "" - "WORKFLOW", "2", "" - "LAUNCH_PLAN", "3", "" - "DATASET", "4", "A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects" - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. 
_ref_flyteidl/core/interface.proto: - -flyteidl/core/interface.proto -================================================================== - - - - - -.. _ref_flyteidl.core.Parameter: - -Parameter ------------------------------------------------------------------- - -A parameter is used as input to a launch plan and has -the special ability to have a default value or mark itself as required. - - - -.. csv-table:: Parameter type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "var", ":ref:`ref_flyteidl.core.Variable`", "", "+required Variable. Defines the type of the variable backing this parameter." - "default", ":ref:`ref_flyteidl.core.Literal`", "", "Defines a default value that has to match the variable type defined." - "required", ":ref:`ref_bool`", "", "+optional, is this value required to be filled." - - - - - - - -.. _ref_flyteidl.core.ParameterMap: - -ParameterMap ------------------------------------------------------------------- - -A map of Parameters. - - - -.. csv-table:: ParameterMap type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "parameters", ":ref:`ref_flyteidl.core.ParameterMap.ParametersEntry`", "repeated", "Defines a map of parameter names to parameters." - - - - - - - -.. _ref_flyteidl.core.ParameterMap.ParametersEntry: - -ParameterMap.ParametersEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: ParameterMap.ParametersEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.Parameter`", "", "" - - - - - - - -.. _ref_flyteidl.core.TypedInterface: - -TypedInterface ------------------------------------------------------------------- - -Defines strongly typed inputs and outputs. - - - -.. 
csv-table:: TypedInterface type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "inputs", ":ref:`ref_flyteidl.core.VariableMap`", "", "" - "outputs", ":ref:`ref_flyteidl.core.VariableMap`", "", "" - - - - - - - -.. _ref_flyteidl.core.Variable: - -Variable ------------------------------------------------------------------- - -Defines a strongly typed variable. - - - -.. csv-table:: Variable type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "type", ":ref:`ref_flyteidl.core.LiteralType`", "", "Variable literal type." - "description", ":ref:`ref_string`", "", "+optional string describing input variable" - - - - - - - -.. _ref_flyteidl.core.VariableMap: - -VariableMap ------------------------------------------------------------------- - -A map of Variables - - - -.. csv-table:: VariableMap type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "variables", ":ref:`ref_flyteidl.core.VariableMap.VariablesEntry`", "repeated", "Defines a map of variable names to variables." - - - - - - - -.. _ref_flyteidl.core.VariableMap.VariablesEntry: - -VariableMap.VariablesEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: VariableMap.VariablesEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.Variable`", "", "" - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/core/literals.proto: - -flyteidl/core/literals.proto -================================================================== - - - - - -.. _ref_flyteidl.core.Binary: - -Binary ------------------------------------------------------------------- - -A simple byte array with a tag to help different parts of the system communicate about what is in the byte array. 
-It's strongly advisable that consumers of this type define a unique tag and validate the tag before parsing the data. - - - -.. csv-table:: Binary type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_bytes`", "", "" - "tag", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.core.Binding: - -Binding ------------------------------------------------------------------- - -An input/output binding of a variable to either static value or a node output. - - - -.. csv-table:: Binding type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "var", ":ref:`ref_string`", "", "Variable name must match an input/output variable of the node." - "binding", ":ref:`ref_flyteidl.core.BindingData`", "", "Data to use to bind this variable." - - - - - - - -.. _ref_flyteidl.core.BindingData: - -BindingData ------------------------------------------------------------------- - -Specifies either a simple value or a reference to another output. - - - -.. csv-table:: BindingData type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "scalar", ":ref:`ref_flyteidl.core.Scalar`", "", "A simple scalar value." - "collection", ":ref:`ref_flyteidl.core.BindingDataCollection`", "", "A collection of binding data. This allows nesting of binding data to any number of levels." - "promise", ":ref:`ref_flyteidl.core.OutputReference`", "", "References an output promised by another node." - "map", ":ref:`ref_flyteidl.core.BindingDataMap`", "", "A map of bindings. The key is always a string." - "union", ":ref:`ref_flyteidl.core.UnionInfo`", "", "" - - - - - - - -.. _ref_flyteidl.core.BindingDataCollection: - -BindingDataCollection ------------------------------------------------------------------- - -A collection of BindingData items. - - - -.. 
csv-table:: BindingDataCollection type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "bindings", ":ref:`ref_flyteidl.core.BindingData`", "repeated", "" - - - - - - - -.. _ref_flyteidl.core.BindingDataMap: - -BindingDataMap ------------------------------------------------------------------- - -A map of BindingData items. - - - -.. csv-table:: BindingDataMap type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "bindings", ":ref:`ref_flyteidl.core.BindingDataMap.BindingsEntry`", "repeated", "" - - - - - - - -.. _ref_flyteidl.core.BindingDataMap.BindingsEntry: - -BindingDataMap.BindingsEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: BindingDataMap.BindingsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.BindingData`", "", "" - - - - - - - -.. _ref_flyteidl.core.Blob: - -Blob ------------------------------------------------------------------- - -Refers to an offloaded set of files. It encapsulates the type of the store and a unique uri for where the data is. -There are no restrictions on how the uri is formatted since it will depend on how to interact with the store. - - - -.. csv-table:: Blob type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "metadata", ":ref:`ref_flyteidl.core.BlobMetadata`", "", "" - "uri", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.core.BlobMetadata: - -BlobMetadata ------------------------------------------------------------------- - - - - - -.. csv-table:: BlobMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "type", ":ref:`ref_flyteidl.core.BlobType`", "", "" - - - - - - - -.. _ref_flyteidl.core.KeyValuePair: - -KeyValuePair ------------------------------------------------------------------- - -A generic key value pair. - - - -.. 
csv-table:: KeyValuePair type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "required." - "value", ":ref:`ref_string`", "", "+optional." - - - - - - - -.. _ref_flyteidl.core.Literal: - -Literal ------------------------------------------------------------------- - -A simple value. This supports any level of nesting (e.g. array of array of array of Blobs) as well as simple primitives. - - - -.. csv-table:: Literal type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "scalar", ":ref:`ref_flyteidl.core.Scalar`", "", "A simple value." - "collection", ":ref:`ref_flyteidl.core.LiteralCollection`", "", "A collection of literals to allow nesting." - "map", ":ref:`ref_flyteidl.core.LiteralMap`", "", "A map of strings to literals." - "hash", ":ref:`ref_string`", "", "A hash representing this literal. This is used for caching purposes. For more details refer to RFC 1893 (https://github.com/flyteorg/flyte/blob/master/rfc/system/1893-caching-of-offloaded-objects.md)" - - - - - - - -.. _ref_flyteidl.core.LiteralCollection: - -LiteralCollection ------------------------------------------------------------------- - -A collection of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field. - - - -.. csv-table:: LiteralCollection type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "literals", ":ref:`ref_flyteidl.core.Literal`", "repeated", "" - - - - - - - -.. _ref_flyteidl.core.LiteralMap: - -LiteralMap ------------------------------------------------------------------- - -A map of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field. - - - -.. csv-table:: LiteralMap type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "literals", ":ref:`ref_flyteidl.core.LiteralMap.LiteralsEntry`", "repeated", "" - - - - - - - -.. 
_ref_flyteidl.core.LiteralMap.LiteralsEntry: - -LiteralMap.LiteralsEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: LiteralMap.LiteralsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.Literal`", "", "" - - - - - - - -.. _ref_flyteidl.core.Primitive: - -Primitive ------------------------------------------------------------------- - -Primitive Types - - - -.. csv-table:: Primitive type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "integer", ":ref:`ref_int64`", "", "" - "float_value", ":ref:`ref_double`", "", "" - "string_value", ":ref:`ref_string`", "", "" - "boolean", ":ref:`ref_bool`", "", "" - "datetime", ":ref:`ref_google.protobuf.Timestamp`", "", "" - "duration", ":ref:`ref_google.protobuf.Duration`", "", "" - - - - - - - -.. _ref_flyteidl.core.RetryStrategy: - -RetryStrategy ------------------------------------------------------------------- - -Retry strategy associated with an executable unit. - - - -.. csv-table:: RetryStrategy type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "retries", ":ref:`ref_uint32`", "", "Number of retries. Retries will be consumed when the job fails with a recoverable error. The number of retries must be less than or equals to 10." - - - - - - - -.. _ref_flyteidl.core.Scalar: - -Scalar ------------------------------------------------------------------- - - - - - -.. 
csv-table:: Scalar type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "primitive", ":ref:`ref_flyteidl.core.Primitive`", "", "" - "blob", ":ref:`ref_flyteidl.core.Blob`", "", "" - "binary", ":ref:`ref_flyteidl.core.Binary`", "", "" - "schema", ":ref:`ref_flyteidl.core.Schema`", "", "" - "none_type", ":ref:`ref_flyteidl.core.Void`", "", "" - "error", ":ref:`ref_flyteidl.core.Error`", "", "" - "generic", ":ref:`ref_google.protobuf.Struct`", "", "" - "structured_dataset", ":ref:`ref_flyteidl.core.StructuredDataset`", "", "" - "union", ":ref:`ref_flyteidl.core.Union`", "", "" - - - - - - - -.. _ref_flyteidl.core.Schema: - -Schema ------------------------------------------------------------------- - -A strongly typed schema that defines the interface of data retrieved from the underlying storage medium. - - - -.. csv-table:: Schema type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "uri", ":ref:`ref_string`", "", "" - "type", ":ref:`ref_flyteidl.core.SchemaType`", "", "" - - - - - - - -.. _ref_flyteidl.core.StructuredDataset: - -StructuredDataset ------------------------------------------------------------------- - - - - - -.. csv-table:: StructuredDataset type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "uri", ":ref:`ref_string`", "", "String location uniquely identifying where the data is. Should start with the storage location (e.g. s3://, gs://, bq://, etc.)" - "metadata", ":ref:`ref_flyteidl.core.StructuredDatasetMetadata`", "", "" - - - - - - - -.. _ref_flyteidl.core.StructuredDatasetMetadata: - -StructuredDatasetMetadata ------------------------------------------------------------------- - - - - - -.. csv-table:: StructuredDatasetMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "structured_dataset_type", ":ref:`ref_flyteidl.core.StructuredDatasetType`", "", "Bundle the type information along with the literal. 
This is here because StructuredDatasets can often be more defined at run time than at compile time. That is, at compile time you might only declare a task to return a pandas dataframe or a StructuredDataset, without any column information, but at run time, you might have that column information. flytekit python will copy this type information into the literal, from the type information, if not provided by the various plugins (encoders). Since this field is run time generated, it's not used for any type checking." - - - - - - - -.. _ref_flyteidl.core.Union: - -Union ------------------------------------------------------------------- - -The runtime representation of a tagged union value. See `UnionType` for more details. - - - -.. csv-table:: Union type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_flyteidl.core.Literal`", "", "" - "type", ":ref:`ref_flyteidl.core.LiteralType`", "", "" - - - - - - - -.. _ref_flyteidl.core.UnionInfo: - -UnionInfo ------------------------------------------------------------------- - - - - - -.. csv-table:: UnionInfo type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "targetType", ":ref:`ref_flyteidl.core.LiteralType`", "", "" - - - - - - - -.. _ref_flyteidl.core.Void: - -Void ------------------------------------------------------------------- - -Used to denote a nil/null/None assignment to a scalar value. The underlying LiteralType for Void is intentionally -undefined since it can be assigned to a scalar of any LiteralType. - - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/core/security.proto: - -flyteidl/core/security.proto -================================================================== - - - - - -.. 
_ref_flyteidl.core.Identity: - -Identity ------------------------------------------------------------------- - -Identity encapsulates the various security identities a task can run as. It's up to the underlying plugin to pick the -right identity for the execution environment. - - - -.. csv-table:: Identity type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "iam_role", ":ref:`ref_string`", "", "iam_role references the fully qualified name of Identity & Access Management role to impersonate." - "k8s_service_account", ":ref:`ref_string`", "", "k8s_service_account references a kubernetes service account to impersonate." - "oauth2_client", ":ref:`ref_flyteidl.core.OAuth2Client`", "", "oauth2_client references an oauth2 client. Backend plugins can use this information to impersonate the client when making external calls." - - - - - - - -.. _ref_flyteidl.core.OAuth2Client: - -OAuth2Client ------------------------------------------------------------------- - -OAuth2Client encapsulates OAuth2 Client Credentials to be used when making calls on behalf of that task. - - - -.. csv-table:: OAuth2Client type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "client_id", ":ref:`ref_string`", "", "client_id is the public id for the client to use. The system will not perform any pre-auth validation that the secret requested matches the client_id indicated here. +required" - "client_secret", ":ref:`ref_flyteidl.core.Secret`", "", "client_secret is a reference to the secret used to authenticate the OAuth2 client. +required" - - - - - - - -.. _ref_flyteidl.core.OAuth2TokenRequest: - -OAuth2TokenRequest ------------------------------------------------------------------- - -OAuth2TokenRequest encapsulates information needed to request an OAuth2 token. -FLYTE_TOKENS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if -tokens are passed through environment variables. 
-FLYTE_TOKENS_PATH_PREFIX will be passed to indicate the prefix of the path where secrets will be mounted if tokens -are passed through file mounts. - - - -.. csv-table:: OAuth2TokenRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_string`", "", "name indicates a unique id for the token request within this task token requests. It'll be used as a suffix for environment variables and as a filename for mounting tokens as files. +required" - "type", ":ref:`ref_flyteidl.core.OAuth2TokenRequest.Type`", "", "type indicates the type of the request to make. Defaults to CLIENT_CREDENTIALS. +required" - "client", ":ref:`ref_flyteidl.core.OAuth2Client`", "", "client references the client_id/secret to use to request the OAuth2 token. +required" - "idp_discovery_endpoint", ":ref:`ref_string`", "", "idp_discovery_endpoint references the discovery endpoint used to retrieve token endpoint and other related information. +optional" - "token_endpoint", ":ref:`ref_string`", "", "token_endpoint references the token issuance endpoint. If idp_discovery_endpoint is not provided, this parameter is mandatory. +optional" - - - - - - - -.. _ref_flyteidl.core.Secret: - -Secret ------------------------------------------------------------------- - -Secret encapsulates information about the secret a task needs to proceed. An environment variable -FLYTE_SECRETS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if -secrets are passed through environment variables. -FLYTE_SECRETS_DEFAULT_DIR will be passed to indicate the prefix of the path where secrets will be mounted if secrets -are passed through file mounts. - - - -.. csv-table:: Secret type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "group", ":ref:`ref_string`", "", "The name of the secret group where to find the key referenced below. For K8s secrets, this should be the name of the v1/secret object. 
For Confidant, this should be the Credential name. For Vault, this should be the secret name. For AWS Secret Manager, this should be the name of the secret. +required" - "group_version", ":ref:`ref_string`", "", "The group version to fetch. This is not supported in all secret management systems. It'll be ignored for the ones that do not support it. +optional" - "key", ":ref:`ref_string`", "", "The name of the secret to mount. This has to match an existing secret in the system. It's up to the implementation of the secret management system to require case sensitivity. For K8s secrets, Confidant and Vault, this should match one of the keys inside the secret. For AWS Secret Manager, it's ignored. +optional" - "mount_requirement", ":ref:`ref_flyteidl.core.Secret.MountType`", "", "mount_requirement is optional. Indicates where the secret has to be mounted. If provided, the execution will fail if the underlying key management system cannot satisfy that requirement. If not provided, the default location will depend on the key management system. +optional" - - - - - - - -.. _ref_flyteidl.core.SecurityContext: - -SecurityContext ------------------------------------------------------------------- - -SecurityContext holds security attributes that apply to tasks. - - - -.. csv-table:: SecurityContext type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "run_as", ":ref:`ref_flyteidl.core.Identity`", "", "run_as encapsulates the identity a pod should run as. If the task fills in multiple fields here, it'll be up to the backend plugin to choose the appropriate identity for the execution engine the task will run on." - "secrets", ":ref:`ref_flyteidl.core.Secret`", "repeated", "secrets indicate the list of secrets the task needs in order to proceed. Secrets will be mounted/passed to the pod as it starts. If the plugin responsible for kicking of the task will not run it on a flyte cluster (e.g. 
AWS Batch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access to the secret) and to pass it to the remote execution engine." - "tokens", ":ref:`ref_flyteidl.core.OAuth2TokenRequest`", "repeated", "tokens indicate the list of token requests the task needs in order to proceed. Tokens will be mounted/passed to the pod as it starts. If the plugin responsible for kicking of the task will not run it on a flyte cluster (e.g. AWS Batch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access to the secret) and to pass it to the remote execution engine." - - - - - - -.. - end messages - - - -.. _ref_flyteidl.core.OAuth2TokenRequest.Type: - -OAuth2TokenRequest.Type ------------------------------------------------------------------- - -Type of the token requested. - -.. csv-table:: Enum OAuth2TokenRequest.Type values - :header: "Name", "Number", "Description" - :widths: auto - - "CLIENT_CREDENTIALS", "0", "CLIENT_CREDENTIALS indicates a 2-legged OAuth token requested using client credentials." - - - -.. _ref_flyteidl.core.Secret.MountType: - -Secret.MountType ------------------------------------------------------------------- - - - -.. csv-table:: Enum Secret.MountType values - :header: "Name", "Number", "Description" - :widths: auto - - "ANY", "0", "Default case, indicates the client can tolerate either mounting options." - "ENV_VAR", "1", "ENV_VAR indicates the secret needs to be mounted as an environment variable." - "FILE", "2", "FILE indicates the secret needs to be mounted as a file." - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/core/tasks.proto: - -flyteidl/core/tasks.proto -================================================================== - - - - - -.. _ref_flyteidl.core.Container: - -Container ------------------------------------------------------------------- - - - - - -.. 
csv-table:: Container type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "image", ":ref:`ref_string`", "", "Container image url. Eg: docker/redis:latest" - "command", ":ref:`ref_string`", "repeated", "Command to be executed, if not provided, the default entrypoint in the container image will be used." - "args", ":ref:`ref_string`", "repeated", "These will default to Flyte given paths. If provided, the system will not append known paths. If the task still needs flyte's inputs and outputs path, add $(FLYTE_INPUT_FILE), $(FLYTE_OUTPUT_FILE) wherever makes sense and the system will populate these before executing the container." - "resources", ":ref:`ref_flyteidl.core.Resources`", "", "Container resources requirement as specified by the container engine." - "env", ":ref:`ref_flyteidl.core.KeyValuePair`", "repeated", "Environment variables will be set as the container is starting up." - "config", ":ref:`ref_flyteidl.core.KeyValuePair`", "repeated", "**Deprecated.** Allows extra configs to be available for the container. TODO: elaborate on how configs will become available. Deprecated, please use TaskTemplate.config instead." - "ports", ":ref:`ref_flyteidl.core.ContainerPort`", "repeated", "Ports to open in the container. This feature is not supported by all execution engines. (e.g. supported on K8s but not supported on AWS Batch) Only K8s" - "data_config", ":ref:`ref_flyteidl.core.DataLoadingConfig`", "", "BETA: Optional configuration for DataLoading. If not specified, then default values are used. This makes it possible to to run a completely portable container, that uses inputs and outputs only from the local file-system and without having any reference to flyteidl. This is supported only on K8s at the moment. If data loading is enabled, then data will be mounted in accompanying directories specified in the DataLoadingConfig. 
If the directories are not specified, inputs will be mounted onto and outputs will be uploaded from a pre-determined file-system path. Refer to the documentation to understand the default paths. Only K8s" - "architecture", ":ref:`ref_flyteidl.core.Container.Architecture`", "", "" - - - - - - - -.. _ref_flyteidl.core.ContainerPort: - -ContainerPort ------------------------------------------------------------------- - -Defines port properties for a container. - - - -.. csv-table:: ContainerPort type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "container_port", ":ref:`ref_uint32`", "", "Number of port to expose on the pod's IP address. This must be a valid port number, 0 < x < 65536." - - - - - - - -.. _ref_flyteidl.core.DataLoadingConfig: - -DataLoadingConfig ------------------------------------------------------------------- - -This configuration allows executing raw containers in Flyte using the Flyte CoPilot system. -Flyte CoPilot eliminates the need for flytekit or an SDK inside the container. Any inputs required by the user's container are side-loaded in the input_path -Any outputs generated by the user container - within output_path are automatically uploaded. - - - -.. csv-table:: DataLoadingConfig type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "enabled", ":ref:`ref_bool`", "", "Flag enables DataLoading Config. If this is not set, data loading will not be used!" - "input_path", ":ref:`ref_string`", "", "File system path (start at root). This folder will contain all the inputs exploded to a separate file. Example, if the input interface needs (x: int, y: blob, z: multipart_blob) and the input path is '/var/flyte/inputs', then the file system will look like /var/flyte/inputs/inputs. .pb .json .yaml> -> Format as defined previously. 
The Blob and Multipart blob will reference local filesystem instead of remote locations /var/flyte/inputs/x -> X is a file that contains the value of x (integer) in string format /var/flyte/inputs/y -> Y is a file in Binary format /var/flyte/inputs/z/... -> Note Z itself is a directory More information about the protocol - refer to docs #TODO reference docs here" - "output_path", ":ref:`ref_string`", "", "File system path (start at root). This folder should contain all the outputs for the task as individual files and/or an error text file" - "format", ":ref:`ref_flyteidl.core.DataLoadingConfig.LiteralMapFormat`", "", "In the inputs folder, there will be an additional summary/metadata file that contains references to all files or inlined primitive values. This format decides the actual encoding for the data. Refer to the encoding to understand the specifics of the contents and the encoding" - "io_strategy", ":ref:`ref_flyteidl.core.IOStrategy`", "", "" - - - - - - - -.. _ref_flyteidl.core.IOStrategy: - -IOStrategy ------------------------------------------------------------------- - -Strategy to use when dealing with Blob, Schema, or multipart blob data (large datasets) - - - -.. csv-table:: IOStrategy type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "download_mode", ":ref:`ref_flyteidl.core.IOStrategy.DownloadMode`", "", "Mode to use to manage downloads" - "upload_mode", ":ref:`ref_flyteidl.core.IOStrategy.UploadMode`", "", "Mode to use to manage uploads" - - - - - - - -.. _ref_flyteidl.core.K8sObjectMetadata: - -K8sObjectMetadata ------------------------------------------------------------------- - -Metadata for building a kubernetes object when a task is executed. - - - -.. csv-table:: K8sObjectMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "labels", ":ref:`ref_flyteidl.core.K8sObjectMetadata.LabelsEntry`", "repeated", "Optional labels to add to the pod definition." 
- "annotations", ":ref:`ref_flyteidl.core.K8sObjectMetadata.AnnotationsEntry`", "repeated", "Optional annotations to add to the pod definition." - - - - - - - -.. _ref_flyteidl.core.K8sObjectMetadata.AnnotationsEntry: - -K8sObjectMetadata.AnnotationsEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: K8sObjectMetadata.AnnotationsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.core.K8sObjectMetadata.LabelsEntry: - -K8sObjectMetadata.LabelsEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: K8sObjectMetadata.LabelsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.core.K8sPod: - -K8sPod ------------------------------------------------------------------- - -Defines a pod spec and additional pod metadata that is created when a task is executed. - - - -.. csv-table:: K8sPod type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "metadata", ":ref:`ref_flyteidl.core.K8sObjectMetadata`", "", "Contains additional metadata for building a kubernetes pod." - "pod_spec", ":ref:`ref_google.protobuf.Struct`", "", "Defines the primary pod spec created when a task is executed. This should be a JSON-marshalled pod spec, which can be defined in - go, using: https://github.com/kubernetes/api/blob/release-1.21/core/v1/types.go#L2936 - python: using https://github.com/kubernetes-client/python/blob/release-19.0/kubernetes/client/models/v1_pod_spec.py" - - - - - - - -.. _ref_flyteidl.core.Resources: - -Resources ------------------------------------------------------------------- - -A customizable interface to convey resources requested for a container. 
This can be interpreted differently for different -container engines. - - - -.. csv-table:: Resources type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "requests", ":ref:`ref_flyteidl.core.Resources.ResourceEntry`", "repeated", "The desired set of resources requested. ResourceNames must be unique within the list." - "limits", ":ref:`ref_flyteidl.core.Resources.ResourceEntry`", "repeated", "Defines a set of bounds (e.g. min/max) within which the task can reliably run. ResourceNames must be unique within the list." - - - - - - - -.. _ref_flyteidl.core.Resources.ResourceEntry: - -Resources.ResourceEntry ------------------------------------------------------------------- - -Encapsulates a resource name and value. - - - -.. csv-table:: Resources.ResourceEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_flyteidl.core.Resources.ResourceName`", "", "Resource name." - "value", ":ref:`ref_string`", "", "Value must be a valid k8s quantity. See https://github.com/kubernetes/apimachinery/blob/master/pkg/api/resource/quantity.go#L30-L80" - - - - - - - -.. _ref_flyteidl.core.RuntimeMetadata: - -RuntimeMetadata ------------------------------------------------------------------- - -Runtime information. This is loosely defined to allow for extensibility. - - - -.. csv-table:: RuntimeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "type", ":ref:`ref_flyteidl.core.RuntimeMetadata.RuntimeType`", "", "Type of runtime." - "version", ":ref:`ref_string`", "", "Version of the runtime. All versions should be backward compatible. However, certain cases call for version checks to ensure tighter validation or setting expectations." - "flavor", ":ref:`ref_string`", "", "+optional It can be used to provide extra information about the runtime (e.g. python, golang... etc.)." - - - - - - - -.. 
_ref_flyteidl.core.Sql: - -Sql ------------------------------------------------------------------- - -Sql represents a generic sql workload with a statement and dialect. - - - -.. csv-table:: Sql type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "statement", ":ref:`ref_string`", "", "The actual query to run, the query can have templated parameters. We use Flyte's Golang templating format for Query templating. For example, insert overwrite directory '{{ .rawOutputDataPrefix }}' stored as parquet select * from my_table where ds = '{{ .Inputs.ds }}'" - "dialect", ":ref:`ref_flyteidl.core.Sql.Dialect`", "", "" - - - - - - - -.. _ref_flyteidl.core.TaskMetadata: - -TaskMetadata ------------------------------------------------------------------- - -Task Metadata - - - -.. csv-table:: TaskMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "discoverable", ":ref:`ref_bool`", "", "Indicates whether the system should attempt to lookup this task's output to avoid duplication of work." - "runtime", ":ref:`ref_flyteidl.core.RuntimeMetadata`", "", "Runtime information about the task." - "timeout", ":ref:`ref_google.protobuf.Duration`", "", "The overall timeout of a task including user-triggered retries." - "retries", ":ref:`ref_flyteidl.core.RetryStrategy`", "", "Number of retries per task." - "discovery_version", ":ref:`ref_string`", "", "Indicates a logical version to apply to this task for the purpose of discovery." - "deprecated_error_message", ":ref:`ref_string`", "", "If set, this indicates that this task is deprecated. This will enable owners of tasks to notify consumers of the ending of support for a given task." 
- "interruptible", ":ref:`ref_bool`", "", "" - "cache_serializable", ":ref:`ref_bool`", "", "Indicates whether the system should attempt to execute discoverable instances in serial to avoid duplicate work" - "generates_deck", ":ref:`ref_bool`", "", "Indicates whether the task will generate a Deck URI when it finishes executing." - "tags", ":ref:`ref_flyteidl.core.TaskMetadata.TagsEntry`", "repeated", "Arbitrary tags that allow users and the platform to store small but arbitrary labels" - - - - - - - -.. _ref_flyteidl.core.TaskMetadata.TagsEntry: - -TaskMetadata.TagsEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: TaskMetadata.TagsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.core.TaskTemplate: - -TaskTemplate ------------------------------------------------------------------- - -A Task structure that uniquely identifies a task in the system -Tasks are registered as a first step in the system. - - - -.. csv-table:: TaskTemplate type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Auto generated taskId by the system. Task Id uniquely identifies this task globally." - "type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier. This can be used to customize any of the components. If no extensions are provided in the system, Flyte will resolve the this task to its TaskCategory and default the implementation registered for the TaskCategory." - "metadata", ":ref:`ref_flyteidl.core.TaskMetadata`", "", "Extra metadata about the task." - "interface", ":ref:`ref_flyteidl.core.TypedInterface`", "", "A strongly typed interface for the task. 
This enables others to use this task within a workflow and guarantees compile-time validation of the workflow to avoid costly runtime failures." - "custom", ":ref:`ref_google.protobuf.Struct`", "", "Custom data about the task. This is extensible to allow various plugins in the system." - "container", ":ref:`ref_flyteidl.core.Container`", "", "" - "k8s_pod", ":ref:`ref_flyteidl.core.K8sPod`", "", "" - "sql", ":ref:`ref_flyteidl.core.Sql`", "", "" - "task_type_version", ":ref:`ref_int32`", "", "This can be used to customize task handling at execution time for the same task type." - "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "security_context encapsulates security attributes requested to run this task." - "config", ":ref:`ref_flyteidl.core.TaskTemplate.ConfigEntry`", "repeated", "Metadata about the custom defined for this task. This is extensible to allow various plugins in the system to use as required. reserve the field numbers 1 through 15 for very frequently occurring message elements" - - - - - - - -.. _ref_flyteidl.core.TaskTemplate.ConfigEntry: - -TaskTemplate.ConfigEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: TaskTemplate.ConfigEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - -.. - end messages - - - -.. _ref_flyteidl.core.Container.Architecture: - -Container.Architecture ------------------------------------------------------------------- - -Architecture-type the container image supports. - -.. csv-table:: Enum Container.Architecture values - :header: "Name", "Number", "Description" - :widths: auto - - "UNKNOWN", "0", "" - "AMD64", "1", "" - "ARM64", "2", "" - "ARM_V6", "3", "" - "ARM_V7", "4", "" - - - -.. 
_ref_flyteidl.core.DataLoadingConfig.LiteralMapFormat: - -DataLoadingConfig.LiteralMapFormat ------------------------------------------------------------------- - -LiteralMapFormat decides the encoding format in which the input metadata should be made available to the containers. -If the user has access to the protocol buffer definitions, it is recommended to use the PROTO format. -JSON and YAML do not need any protobuf definitions to read it -All remote references in core.LiteralMap are replaced with local filesystem references (the data is downloaded to local filesystem) - -.. csv-table:: Enum DataLoadingConfig.LiteralMapFormat values - :header: "Name", "Number", "Description" - :widths: auto - - "JSON", "0", "JSON / YAML for the metadata (which contains inlined primitive values). The representation is inline with the standard json specification as specified - https://www.json.org/json-en.html" - "YAML", "1", "" - "PROTO", "2", "Proto is a serialized binary of `core.LiteralMap` defined in flyteidl/core" - - - -.. _ref_flyteidl.core.IOStrategy.DownloadMode: - -IOStrategy.DownloadMode ------------------------------------------------------------------- - -Mode to use for downloading - -.. csv-table:: Enum IOStrategy.DownloadMode values - :header: "Name", "Number", "Description" - :widths: auto - - "DOWNLOAD_EAGER", "0", "All data will be downloaded before the main container is executed" - "DOWNLOAD_STREAM", "1", "Data will be downloaded as a stream and an End-Of-Stream marker will be written to indicate all data has been downloaded. Refer to protocol for details" - "DO_NOT_DOWNLOAD", "2", "Large objects (offloaded) will not be downloaded" - - - -.. _ref_flyteidl.core.IOStrategy.UploadMode: - -IOStrategy.UploadMode ------------------------------------------------------------------- - -Mode to use for uploading - -.. 
csv-table:: Enum IOStrategy.UploadMode values - :header: "Name", "Number", "Description" - :widths: auto - - "UPLOAD_ON_EXIT", "0", "All data will be uploaded after the main container exits" - "UPLOAD_EAGER", "1", "Data will be uploaded as it appears. Refer to protocol specification for details" - "DO_NOT_UPLOAD", "2", "Data will not be uploaded, only references will be written" - - - -.. _ref_flyteidl.core.Resources.ResourceName: - -Resources.ResourceName ------------------------------------------------------------------- - -Known resource names. - -.. csv-table:: Enum Resources.ResourceName values - :header: "Name", "Number", "Description" - :widths: auto - - "UNKNOWN", "0", "" - "CPU", "1", "" - "GPU", "2", "" - "MEMORY", "3", "" - "STORAGE", "4", "" - "EPHEMERAL_STORAGE", "5", "For Kubernetes-based deployments, pods use ephemeral local storage for scratch space, caching, and for logs." - - - -.. _ref_flyteidl.core.RuntimeMetadata.RuntimeType: - -RuntimeMetadata.RuntimeType ------------------------------------------------------------------- - - - -.. csv-table:: Enum RuntimeMetadata.RuntimeType values - :header: "Name", "Number", "Description" - :widths: auto - - "OTHER", "0", "" - "FLYTE_SDK", "1", "" - - - -.. _ref_flyteidl.core.Sql.Dialect: - -Sql.Dialect ------------------------------------------------------------------- - -The dialect of the SQL statement. This is used to validate and parse SQL statements at compilation time to avoid -expensive runtime operations. If set to an unsupported dialect, no validation will be done on the statement. -We support the following dialect: ansi, hive. - -.. csv-table:: Enum Sql.Dialect values - :header: "Name", "Number", "Description" - :widths: auto - - "UNDEFINED", "0", "" - "ANSI", "1", "" - "HIVE", "2", "" - "OTHER", "3", "" - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. 
_ref_flyteidl/core/types.proto: - -flyteidl/core/types.proto -================================================================== - - - - - -.. _ref_flyteidl.core.BlobType: - -BlobType ------------------------------------------------------------------- - -Defines type behavior for blob objects - - - -.. csv-table:: BlobType type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "format", ":ref:`ref_string`", "", "Format can be a free form string understood by SDK/UI etc like csv, parquet etc" - "dimensionality", ":ref:`ref_flyteidl.core.BlobType.BlobDimensionality`", "", "" - - - - - - - -.. _ref_flyteidl.core.EnumType: - -EnumType ------------------------------------------------------------------- - -Enables declaring enum types, with predefined string values -For len(values) > 0, the first value in the ordered list is regarded as the default value. If you wish -To provide no defaults, make the first value as undefined. - - - -.. csv-table:: EnumType type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_string`", "repeated", "Predefined set of enum values." - - - - - - - -.. _ref_flyteidl.core.Error: - -Error ------------------------------------------------------------------- - -Represents an error thrown from a node. - - - -.. csv-table:: Error type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "failed_node_id", ":ref:`ref_string`", "", "The node id that threw the error." - "message", ":ref:`ref_string`", "", "Error message thrown." - - - - - - - -.. _ref_flyteidl.core.LiteralType: - -LiteralType ------------------------------------------------------------------- - -Defines a strong type to allow type checking between interfaces. - - - -.. 
csv-table:: LiteralType type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "simple", ":ref:`ref_flyteidl.core.SimpleType`", "", "A simple type that can be compared one-to-one with another." - "schema", ":ref:`ref_flyteidl.core.SchemaType`", "", "A complex type that requires matching of inner fields." - "collection_type", ":ref:`ref_flyteidl.core.LiteralType`", "", "Defines the type of the value of a collection. Only homogeneous collections are allowed." - "map_value_type", ":ref:`ref_flyteidl.core.LiteralType`", "", "Defines the type of the value of a map type. The type of the key is always a string." - "blob", ":ref:`ref_flyteidl.core.BlobType`", "", "A blob might have specialized implementation details depending on associated metadata." - "enum_type", ":ref:`ref_flyteidl.core.EnumType`", "", "Defines an enum with pre-defined string values." - "structured_dataset_type", ":ref:`ref_flyteidl.core.StructuredDatasetType`", "", "Generalized schema support" - "union_type", ":ref:`ref_flyteidl.core.UnionType`", "", "Defines a union type with pre-defined LiteralTypes." - "metadata", ":ref:`ref_google.protobuf.Struct`", "", "This field contains type metadata that is descriptive of the type, but is NOT considered in type-checking. This might be used by consumers to identify special behavior or display extended information for the type." - "annotation", ":ref:`ref_flyteidl.core.TypeAnnotation`", "", "This field contains arbitrary data that might have special semantic meaning for the client but does not affect internal flyte behavior." - "structure", ":ref:`ref_flyteidl.core.TypeStructure`", "", "Hints to improve type matching." - - - - - - - -.. _ref_flyteidl.core.OutputReference: - -OutputReference ------------------------------------------------------------------- - -A reference to an output produced by a node. The type can be retrieved -and validated- from -the underlying interface of the node. - - - -.. 
csv-table:: OutputReference type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "node_id", ":ref:`ref_string`", "", "Node id must exist at the graph layer." - "var", ":ref:`ref_string`", "", "Variable name must refer to an output variable for the node." - - - - - - - -.. _ref_flyteidl.core.SchemaType: - -SchemaType ------------------------------------------------------------------- - -Defines schema columns and types to strongly type-validate schemas interoperability. - - - -.. csv-table:: SchemaType type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "columns", ":ref:`ref_flyteidl.core.SchemaType.SchemaColumn`", "repeated", "A list of ordered columns this schema comprises of." - - - - - - - -.. _ref_flyteidl.core.SchemaType.SchemaColumn: - -SchemaType.SchemaColumn ------------------------------------------------------------------- - - - - - -.. csv-table:: SchemaType.SchemaColumn type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_string`", "", "A unique name -within the schema type- for the column" - "type", ":ref:`ref_flyteidl.core.SchemaType.SchemaColumn.SchemaColumnType`", "", "The column type. This allows a limited set of types currently." - - - - - - - -.. _ref_flyteidl.core.StructuredDatasetType: - -StructuredDatasetType ------------------------------------------------------------------- - - - - - -.. csv-table:: StructuredDatasetType type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "columns", ":ref:`ref_flyteidl.core.StructuredDatasetType.DatasetColumn`", "repeated", "A list of ordered columns this schema comprises of." - "format", ":ref:`ref_string`", "", "This is the storage format, the format of the bits at rest parquet, feather, csv, etc. For two types to be compatible, the format will need to be an exact match." 
- "external_schema_type", ":ref:`ref_string`", "", "This is a string representing the type that the bytes in external_schema_bytes are formatted in. This is an optional field that will not be used for type checking." - "external_schema_bytes", ":ref:`ref_bytes`", "", "The serialized bytes of a third-party schema library like Arrow. This is an optional field that will not be used for type checking." - - - - - - - -.. _ref_flyteidl.core.StructuredDatasetType.DatasetColumn: - -StructuredDatasetType.DatasetColumn ------------------------------------------------------------------- - - - - - -.. csv-table:: StructuredDatasetType.DatasetColumn type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_string`", "", "A unique name within the schema type for the column." - "literal_type", ":ref:`ref_flyteidl.core.LiteralType`", "", "The column type." - - - - - - - -.. _ref_flyteidl.core.TypeAnnotation: - -TypeAnnotation ------------------------------------------------------------------- - -TypeAnnotation encapsulates registration time information about a type. This can be used for various control-plane operations. TypeAnnotation will not be available at runtime when a task runs. - - - -.. csv-table:: TypeAnnotation type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "annotations", ":ref:`ref_google.protobuf.Struct`", "", "An arbitrary JSON payload to describe a type." - - - - - - - -.. _ref_flyteidl.core.TypeStructure: - -TypeStructure ------------------------------------------------------------------- - -Hints to improve type matching -e.g. allows distinguishing output from custom type transformers -even if the underlying IDL serialization matches. - - - -.. csv-table:: TypeStructure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tag", ":ref:`ref_string`", "", "Must exactly match for types to be castable" - - - - - - - -.. 
_ref_flyteidl.core.UnionType: - -UnionType ------------------------------------------------------------------- - -Defines a tagged union type, also known as a variant (and formally as the sum type). - -A sum type S is defined by a sequence of types (A, B, C, ...), each tagged by a string tag -A value of type S is constructed from a value of any of the variant types. The specific choice of type is recorded by -storing the varaint's tag with the literal value and can be examined in runtime. - -Type S is typically written as -S := Apple A | Banana B | Cantaloupe C | ... - -Notably, a nullable (optional) type is a sum type between some type X and the singleton type representing a null-value: -Optional X := X | Null - -See also: https://en.wikipedia.org/wiki/Tagged_union - - - -.. csv-table:: UnionType type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "variants", ":ref:`ref_flyteidl.core.LiteralType`", "repeated", "Predefined set of variants in union." - - - - - - -.. - end messages - - - -.. _ref_flyteidl.core.BlobType.BlobDimensionality: - -BlobType.BlobDimensionality ------------------------------------------------------------------- - - - -.. csv-table:: Enum BlobType.BlobDimensionality values - :header: "Name", "Number", "Description" - :widths: auto - - "SINGLE", "0", "" - "MULTIPART", "1", "" - - - -.. _ref_flyteidl.core.SchemaType.SchemaColumn.SchemaColumnType: - -SchemaType.SchemaColumn.SchemaColumnType ------------------------------------------------------------------- - - - -.. csv-table:: Enum SchemaType.SchemaColumn.SchemaColumnType values - :header: "Name", "Number", "Description" - :widths: auto - - "INTEGER", "0", "" - "FLOAT", "1", "" - "STRING", "2", "" - "BOOLEAN", "3", "" - "DATETIME", "4", "" - "DURATION", "5", "" - - - -.. _ref_flyteidl.core.SimpleType: - -SimpleType ------------------------------------------------------------------- - -Define a set of simple types. - -.. 
csv-table:: Enum SimpleType values - :header: "Name", "Number", "Description" - :widths: auto - - "NONE", "0", "" - "INTEGER", "1", "" - "FLOAT", "2", "" - "STRING", "3", "" - "BOOLEAN", "4", "" - "DATETIME", "5", "" - "DURATION", "6", "" - "BINARY", "7", "" - "ERROR", "8", "" - "STRUCT", "9", "" - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/core/workflow.proto: - -flyteidl/core/workflow.proto -================================================================== - - - - - -.. _ref_flyteidl.core.Alias: - -Alias ------------------------------------------------------------------- - -Links a variable to an alias. - - - -.. csv-table:: Alias type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "var", ":ref:`ref_string`", "", "Must match one of the output variable names on a node." - "alias", ":ref:`ref_string`", "", "A workflow-level unique alias that downstream nodes can refer to in their input." - - - - - - - -.. _ref_flyteidl.core.ApproveCondition: - -ApproveCondition ------------------------------------------------------------------- - -ApproveCondition represents a dependency on an external approval. During execution, this will manifest as a boolean -signal with the provided signal_id. - - - -.. csv-table:: ApproveCondition type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "signal_id", ":ref:`ref_string`", "", "A unique identifier for the requested boolean signal." - - - - - - - -.. _ref_flyteidl.core.BranchNode: - -BranchNode ------------------------------------------------------------------- - -BranchNode is a special node that alter the flow of the workflow graph. It allows the control flow to branch at -runtime based on a series of conditions that get evaluated on various parameters (e.g. inputs, primitives). - - - -.. 
csv-table:: BranchNode type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "if_else", ":ref:`ref_flyteidl.core.IfElseBlock`", "", "+required" - - - - - - - -.. _ref_flyteidl.core.GateNode: - -GateNode ------------------------------------------------------------------- - -GateNode refers to the condition that is required for the gate to successfully complete. - - - -.. csv-table:: GateNode type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "approve", ":ref:`ref_flyteidl.core.ApproveCondition`", "", "ApproveCondition represents a dependency on an external approval provided by a boolean signal." - "signal", ":ref:`ref_flyteidl.core.SignalCondition`", "", "SignalCondition represents a dependency on an signal." - "sleep", ":ref:`ref_flyteidl.core.SleepCondition`", "", "SleepCondition represents a dependency on waiting for the specified duration." - - - - - - - -.. _ref_flyteidl.core.IfBlock: - -IfBlock ------------------------------------------------------------------- - -Defines a condition and the execution unit that should be executed if the condition is satisfied. - - - -.. csv-table:: IfBlock type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "condition", ":ref:`ref_flyteidl.core.BooleanExpression`", "", "" - "then_node", ":ref:`ref_flyteidl.core.Node`", "", "" - - - - - - - -.. _ref_flyteidl.core.IfElseBlock: - -IfElseBlock ------------------------------------------------------------------- - -Defines a series of if/else blocks. The first branch whose condition evaluates to true is the one to execute. -If no conditions were satisfied, the else_node or the error will execute. - - - -.. csv-table:: IfElseBlock type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "case", ":ref:`ref_flyteidl.core.IfBlock`", "", "+required. First condition to evaluate." - "other", ":ref:`ref_flyteidl.core.IfBlock`", "repeated", "+optional. 
Additional branches to evaluate." - "else_node", ":ref:`ref_flyteidl.core.Node`", "", "The node to execute in case none of the branches were taken." - "error", ":ref:`ref_flyteidl.core.Error`", "", "An error to throw in case none of the branches were taken." - - - - - - - -.. _ref_flyteidl.core.Node: - -Node ------------------------------------------------------------------- - -A Workflow graph Node. One unit of execution in the graph. Each node can be linked to a Task, a Workflow or a branch -node. - - - -.. csv-table:: Node type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_string`", "", "A workflow-level unique identifier that identifies this node in the workflow. 'inputs' and 'outputs' are reserved node ids that cannot be used by other nodes." - "metadata", ":ref:`ref_flyteidl.core.NodeMetadata`", "", "Extra metadata about the node." - "inputs", ":ref:`ref_flyteidl.core.Binding`", "repeated", "Specifies how to bind the underlying interface's inputs. All required inputs specified in the underlying interface must be fulfilled." - "upstream_node_ids", ":ref:`ref_string`", "repeated", "+optional Specifies execution dependency for this node ensuring it will only get scheduled to run after all its upstream nodes have completed. This node will have an implicit dependency on any node that appears in inputs field." - "output_aliases", ":ref:`ref_flyteidl.core.Alias`", "repeated", "+optional. A node can define aliases for a subset of its outputs. This is particularly useful if different nodes need to conform to the same interface (e.g. all branches in a branch node). Downstream nodes must refer to this nodes outputs using the alias if one's specified." - "task_node", ":ref:`ref_flyteidl.core.TaskNode`", "", "Information about the Task to execute in this node." - "workflow_node", ":ref:`ref_flyteidl.core.WorkflowNode`", "", "Information about the Workflow to execute in this mode." 
- "branch_node", ":ref:`ref_flyteidl.core.BranchNode`", "", "Information about the branch node to evaluate in this node." - "gate_node", ":ref:`ref_flyteidl.core.GateNode`", "", "Information about the condition to evaluate in this node." - - - - - - - -.. _ref_flyteidl.core.NodeMetadata: - -NodeMetadata ------------------------------------------------------------------- - -Defines extra information about the Node. - - - -.. csv-table:: NodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_string`", "", "A friendly name for the Node" - "timeout", ":ref:`ref_google.protobuf.Duration`", "", "The overall timeout of a task." - "retries", ":ref:`ref_flyteidl.core.RetryStrategy`", "", "Number of retries per task." - "interruptible", ":ref:`ref_bool`", "", "" - - - - - - - -.. _ref_flyteidl.core.SignalCondition: - -SignalCondition ------------------------------------------------------------------- - -SignalCondition represents a dependency on an signal. - - - -.. csv-table:: SignalCondition type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "signal_id", ":ref:`ref_string`", "", "A unique identifier for the requested signal." - "type", ":ref:`ref_flyteidl.core.LiteralType`", "", "A type denoting the required value type for this signal." - "output_variable_name", ":ref:`ref_string`", "", "The variable name for the signal value in this nodes outputs." - - - - - - - -.. _ref_flyteidl.core.SleepCondition: - -SleepCondition ------------------------------------------------------------------- - -SleepCondition represents a dependency on waiting for the specified duration. - - - -.. csv-table:: SleepCondition type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "duration", ":ref:`ref_google.protobuf.Duration`", "", "The overall duration for this sleep." - - - - - - - -.. 
_ref_flyteidl.core.TaskNode: - -TaskNode ------------------------------------------------------------------- - -Refers to the task that the Node is to execute. - - - -.. csv-table:: TaskNode type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "reference_id", ":ref:`ref_flyteidl.core.Identifier`", "", "A globally unique identifier for the task." - "overrides", ":ref:`ref_flyteidl.core.TaskNodeOverrides`", "", "Optional overrides applied at task execution time." - - - - - - - -.. _ref_flyteidl.core.TaskNodeOverrides: - -TaskNodeOverrides ------------------------------------------------------------------- - -Optional task node overrides that will be applied at task execution time. - - - -.. csv-table:: TaskNodeOverrides type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resources", ":ref:`ref_flyteidl.core.Resources`", "", "A customizable interface to convey resources requested for a task container." - - - - - - - -.. _ref_flyteidl.core.WorkflowMetadata: - -WorkflowMetadata ------------------------------------------------------------------- - -This is workflow layer metadata. These settings are only applicable to the workflow as a whole, and do not -percolate down to child entities (like tasks) launched by the workflow. - - - -.. csv-table:: WorkflowMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "Indicates the runtime priority of workflow executions." - "on_failure", ":ref:`ref_flyteidl.core.WorkflowMetadata.OnFailurePolicy`", "", "Defines how the system should behave when a failure is detected in the workflow execution." - "tags", ":ref:`ref_flyteidl.core.WorkflowMetadata.TagsEntry`", "repeated", "Arbitrary tags that allow users and the platform to store small but arbitrary labels" - - - - - - - -.. 
_ref_flyteidl.core.WorkflowMetadata.TagsEntry: - -WorkflowMetadata.TagsEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: WorkflowMetadata.TagsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.core.WorkflowMetadataDefaults: - -WorkflowMetadataDefaults ------------------------------------------------------------------- - -The difference between these settings and the WorkflowMetadata ones is that these are meant to be passed down to -a workflow's underlying entities (like tasks). For instance, 'interruptible' has no meaning at the workflow layer, it -is only relevant when a task executes. The settings here are the defaults that are passed to all nodes -unless explicitly overridden at the node layer. -If you are adding a setting that applies to both the Workflow itself, and everything underneath it, it should be -added to both this object and the WorkflowMetadata object above. - - - -.. csv-table:: WorkflowMetadataDefaults type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "interruptible", ":ref:`ref_bool`", "", "Whether child nodes of the workflow are interruptible." - - - - - - - -.. _ref_flyteidl.core.WorkflowNode: - -WorkflowNode ------------------------------------------------------------------- - -Refers to a the workflow the node is to execute. - - - -.. csv-table:: WorkflowNode type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "launchplan_ref", ":ref:`ref_flyteidl.core.Identifier`", "", "A globally unique identifier for the launch plan." - "sub_workflow_ref", ":ref:`ref_flyteidl.core.Identifier`", "", "Reference to a subworkflow, that should be defined with the compiler context" - - - - - - - -.. 
_ref_flyteidl.core.WorkflowTemplate: - -WorkflowTemplate ------------------------------------------------------------------- - -Flyte Workflow Structure that encapsulates task, branch and subworkflow nodes to form a statically analyzable, -directed acyclic graph. - - - -.. csv-table:: WorkflowTemplate type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "A globally unique identifier for the workflow." - "metadata", ":ref:`ref_flyteidl.core.WorkflowMetadata`", "", "Extra metadata about the workflow." - "interface", ":ref:`ref_flyteidl.core.TypedInterface`", "", "Defines a strongly typed interface for the Workflow. This can include some optional parameters." - "nodes", ":ref:`ref_flyteidl.core.Node`", "repeated", "A list of nodes. In addition, 'globals' is a special reserved node id that can be used to consume workflow inputs." - "outputs", ":ref:`ref_flyteidl.core.Binding`", "repeated", "A list of output bindings that specify how to construct workflow outputs. Bindings can pull node outputs or specify literals. All workflow outputs specified in the interface field must be bound in order for the workflow to be validated. A workflow has an implicit dependency on all of its nodes to execute successfully in order to bind final outputs. Most of these outputs will be Binding's with a BindingData of type OutputReference. That is, your workflow can just have an output of some constant (`Output(5)`), but usually, the workflow will be pulling outputs from the output of a task." - "failure_node", ":ref:`ref_flyteidl.core.Node`", "", "+optional A catch-all node. This node is executed whenever the execution engine determines the workflow has failed. The interface of this node must match the Workflow interface with an additional input named 'error' of type pb.lyft.flyte.core.Error." - "metadata_defaults", ":ref:`ref_flyteidl.core.WorkflowMetadataDefaults`", "", "workflow defaults" - - - - - - -.. 
- end messages - - - -.. _ref_flyteidl.core.WorkflowMetadata.OnFailurePolicy: - -WorkflowMetadata.OnFailurePolicy ------------------------------------------------------------------- - -Failure Handling Strategy - -.. csv-table:: Enum WorkflowMetadata.OnFailurePolicy values - :header: "Name", "Number", "Description" - :widths: auto - - "FAIL_IMMEDIATELY", "0", "FAIL_IMMEDIATELY instructs the system to fail as soon as a node fails in the workflow. It'll automatically abort all currently running nodes and clean up resources before finally marking the workflow executions as failed." - "FAIL_AFTER_EXECUTABLE_NODES_COMPLETE", "1", "FAIL_AFTER_EXECUTABLE_NODES_COMPLETE instructs the system to make as much progress as it can. The system will not alter the dependencies of the execution graph so any node that depend on the failed node will not be run. Other nodes that will be executed to completion before cleaning up resources and marking the workflow execution as failed." - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/core/workflow_closure.proto: - -flyteidl/core/workflow_closure.proto -================================================================== - - - - - -.. _ref_flyteidl.core.WorkflowClosure: - -WorkflowClosure ------------------------------------------------------------------- - -Defines an enclosed package of workflow and tasks it references. - - - -.. csv-table:: WorkflowClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "workflow", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "", "required. Workflow template." - "tasks", ":ref:`ref_flyteidl.core.TaskTemplate`", "repeated", "optional. A collection of tasks referenced by the workflow. Only needed if the workflow references tasks." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. 
_ref_google/protobuf/timestamp.proto: - -google/protobuf/timestamp.proto -================================================================== - - - - - -.. _ref_google.protobuf.Timestamp: - -Timestamp ------------------------------------------------------------------- - -A Timestamp represents a point in time independent of any time zone or local -calendar, encoded as a count of seconds and fractions of seconds at -nanosecond resolution. The count is relative to an epoch at UTC midnight on -January 1, 1970, in the proleptic Gregorian calendar which extends the -Gregorian calendar backwards to year one. - -All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap -second table is needed for interpretation, using a [24-hour linear -smear](https://developers.google.com/time/smear). - -The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By -restricting to that range, we ensure that we can convert to and from [RFC -3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. - -# Examples - -Example 1: Compute Timestamp from POSIX `time()`. - - Timestamp timestamp; - timestamp.set_seconds(time(NULL)); - timestamp.set_nanos(0); - -Example 2: Compute Timestamp from POSIX `gettimeofday()`. - - struct timeval tv; - gettimeofday(&tv, NULL); - - Timestamp timestamp; - timestamp.set_seconds(tv.tv_sec); - timestamp.set_nanos(tv.tv_usec * 1000); - -Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. - - FILETIME ft; - GetSystemTimeAsFileTime(&ft); - UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; - - // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z - // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. - Timestamp timestamp; - timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); - timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); - -Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. 
- - long millis = System.currentTimeMillis(); - - Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) - .setNanos((int) ((millis % 1000) * 1000000)).build(); - -Example 5: Compute Timestamp from Java `Instant.now()`. - - Instant now = Instant.now(); - - Timestamp timestamp = - Timestamp.newBuilder().setSeconds(now.getEpochSecond()) - .setNanos(now.getNano()).build(); - -Example 6: Compute Timestamp from current time in Python. - - timestamp = Timestamp() - timestamp.GetCurrentTime() - -# JSON Mapping - -In JSON format, the Timestamp type is encoded as a string in the -[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the -format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" -where {year} is always expressed using four digits while {month}, {day}, -{hour}, {min}, and {sec} are zero-padded to two digits each. The fractional -seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), -are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone -is required. A proto3 JSON serializer should always use UTC (as indicated by -"Z") when printing the Timestamp type and a proto3 JSON parser should be -able to accept both UTC and other timezones (as indicated by an offset). - -For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past -01:30 UTC on January 15, 2017. - -In JavaScript, one can convert a Date object to this format using the -standard -[toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) -method. In Python, a standard `datetime.datetime` object can be converted -to this format using -[`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with -the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. 
Likewise, in Java, one can use -the Joda Time's [`ISODateTimeFormat.dateTime()`]( -http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D -) to obtain a formatter capable of generating timestamps in this format. - - - -.. csv-table:: Timestamp type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive." - "nanos", ":ref:`ref_int32`", "", "Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_google/protobuf/duration.proto: - -google/protobuf/duration.proto -================================================================== - - - - - -.. _ref_google.protobuf.Duration: - -Duration ------------------------------------------------------------------- - -A Duration represents a signed, fixed-length span of time represented -as a count of seconds and fractions of seconds at nanosecond -resolution. It is independent of any calendar and concepts like "day" -or "month". It is related to Timestamp in that the difference between -two Timestamp values is a Duration and it can be added or subtracted -from a Timestamp. Range is approximately +-10,000 years. - -# Examples - -Example 1: Compute Duration from two Timestamps in pseudo code. 
- - Timestamp start = ...; - Timestamp end = ...; - Duration duration = ...; - - duration.seconds = end.seconds - start.seconds; - duration.nanos = end.nanos - start.nanos; - - if (duration.seconds < 0 && duration.nanos > 0) { - duration.seconds += 1; - duration.nanos -= 1000000000; - } else if (duration.seconds > 0 && duration.nanos < 0) { - duration.seconds -= 1; - duration.nanos += 1000000000; - } - -Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. - - Timestamp start = ...; - Duration duration = ...; - Timestamp end = ...; - - end.seconds = start.seconds + duration.seconds; - end.nanos = start.nanos + duration.nanos; - - if (end.nanos < 0) { - end.seconds -= 1; - end.nanos += 1000000000; - } else if (end.nanos >= 1000000000) { - end.seconds += 1; - end.nanos -= 1000000000; - } - -Example 3: Compute Duration from datetime.timedelta in Python. - - td = datetime.timedelta(days=3, minutes=10) - duration = Duration() - duration.FromTimedelta(td) - -# JSON Mapping - -In JSON format, the Duration type is encoded as a string rather than an -object, where the string ends in the suffix "s" (indicating seconds) and -is preceded by the number of seconds, with nanoseconds expressed as -fractional seconds. For example, 3 seconds with 0 nanoseconds should be -encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -microsecond should be expressed in JSON format as "3.000001s". - - - -.. csv-table:: Duration type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years" - "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. 
Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_google/protobuf/struct.proto: - -google/protobuf/struct.proto -================================================================== - - - - - -.. _ref_google.protobuf.ListValue: - -ListValue ------------------------------------------------------------------- - -`ListValue` is a wrapper around a repeated field of values. - -The JSON representation for `ListValue` is JSON array. - - - -.. csv-table:: ListValue type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_google.protobuf.Value`", "repeated", "Repeated field of dynamically typed values." - - - - - - - -.. _ref_google.protobuf.Struct: - -Struct ------------------------------------------------------------------- - -`Struct` represents a structured data value, consisting of fields -which map to dynamically typed values. In some languages, `Struct` -might be supported by a native representation. For example, in -scripting languages like JS a struct is represented as an -object. The details of that representation are described together -with the proto support for the language. - -The JSON representation for `Struct` is JSON object. - - - -.. csv-table:: Struct type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "fields", ":ref:`ref_google.protobuf.Struct.FieldsEntry`", "repeated", "Unordered map of dynamically typed values." - - - - - - - -.. _ref_google.protobuf.Struct.FieldsEntry: - -Struct.FieldsEntry ------------------------------------------------------------------- - - - - - -.. 
csv-table:: Struct.FieldsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_google.protobuf.Value`", "", "" - - - - - - - -.. _ref_google.protobuf.Value: - -Value ------------------------------------------------------------------- - -`Value` represents a dynamically typed value which can be either -null, a number, a string, a boolean, a recursive struct value, or a -list of values. A producer of value is expected to set one of these -variants. Absence of any variant indicates an error. - -The JSON representation for `Value` is JSON value. - - - -.. csv-table:: Value type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "null_value", ":ref:`ref_google.protobuf.NullValue`", "", "Represents a null value." - "number_value", ":ref:`ref_double`", "", "Represents a double value." - "string_value", ":ref:`ref_string`", "", "Represents a string value." - "bool_value", ":ref:`ref_bool`", "", "Represents a boolean value." - "struct_value", ":ref:`ref_google.protobuf.Struct`", "", "Represents a structured value." - "list_value", ":ref:`ref_google.protobuf.ListValue`", "", "Represents a repeated `Value`." - - - - - - -.. - end messages - - - -.. _ref_google.protobuf.NullValue: - -NullValue ------------------------------------------------------------------- - -`NullValue` is a singleton enumeration to represent the null value for the -`Value` type union. - - The JSON representation for `NullValue` is JSON `null`. - -.. csv-table:: Enum NullValue values - :header: "Name", "Number", "Description" - :widths: auto - - "NULL_VALUE", "0", "Null value." - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - -.. _ref_scala_types: - -Scalar Value Types -================== - - - -.. _ref_double: - -double ------------------------------ - - - -.. 
csv-table:: double language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "double", "double", "double", "float", "float64", "double", "float", "Float" - - - -.. _ref_float: - -float ------------------------------ - - - -.. csv-table:: float language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "float", "float", "float", "float", "float32", "float", "float", "Float" - - - -.. _ref_int32: - -int32 ------------------------------ - -Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint32 instead. - -.. csv-table:: int32 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "int32", "int32", "int", "int", "int32", "int", "integer", "Bignum or Fixnum (as required)" - - - -.. _ref_int64: - -int64 ------------------------------ - -Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint64 instead. - -.. csv-table:: int64 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "int64", "int64", "long", "int/long", "int64", "long", "integer/string", "Bignum" - - - -.. _ref_uint32: - -uint32 ------------------------------ - -Uses variable-length encoding. - -.. csv-table:: uint32 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "uint32", "uint32", "int", "int/long", "uint32", "uint", "integer", "Bignum or Fixnum (as required)" - - - -.. _ref_uint64: - -uint64 ------------------------------ - -Uses variable-length encoding. - -.. 
csv-table:: uint64 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "uint64", "uint64", "long", "int/long", "uint64", "ulong", "integer/string", "Bignum or Fixnum (as required)" - - - -.. _ref_sint32: - -sint32 ------------------------------ - -Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int32s. - -.. csv-table:: sint32 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "sint32", "int32", "int", "int", "int32", "int", "integer", "Bignum or Fixnum (as required)" - - - -.. _ref_sint64: - -sint64 ------------------------------ - -Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int64s. - -.. csv-table:: sint64 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "sint64", "int64", "long", "int/long", "int64", "long", "integer/string", "Bignum" - - - -.. _ref_fixed32: - -fixed32 ------------------------------ - -Always four bytes. More efficient than uint32 if values are often greater than 2^28. - -.. csv-table:: fixed32 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "fixed32", "uint32", "int", "int", "uint32", "uint", "integer", "Bignum or Fixnum (as required)" - - - -.. _ref_fixed64: - -fixed64 ------------------------------ - -Always eight bytes. More efficient than uint64 if values are often greater than 2^56. - -.. csv-table:: fixed64 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "fixed64", "uint64", "long", "int/long", "uint64", "ulong", "integer/string", "Bignum" - - - -.. _ref_sfixed32: - -sfixed32 ------------------------------ - -Always four bytes. - -.. 
csv-table:: sfixed32 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "sfixed32", "int32", "int", "int", "int32", "int", "integer", "Bignum or Fixnum (as required)" - - - -.. _ref_sfixed64: - -sfixed64 ------------------------------ - -Always eight bytes. - -.. csv-table:: sfixed64 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "sfixed64", "int64", "long", "int/long", "int64", "long", "integer/string", "Bignum" - - - -.. _ref_bool: - -bool ------------------------------ - - - -.. csv-table:: bool language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "bool", "bool", "boolean", "boolean", "bool", "bool", "boolean", "TrueClass/FalseClass" - - - -.. _ref_string: - -string ------------------------------ - -A string must always contain UTF-8 encoded or 7-bit ASCII text. - -.. csv-table:: string language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "string", "string", "String", "str/unicode", "string", "string", "string", "String (UTF-8)" - - - -.. _ref_bytes: - -bytes ------------------------------ - -May contain any arbitrary sequence of bytes. - -.. csv-table:: bytes language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "bytes", "string", "ByteString", "str", "[]byte", "ByteString", "string", "String (ASCII-8BIT)" - - -.. 
- end scalars \ No newline at end of file diff --git a/docs/api/flyteidl/docs/core/index.rst b/docs/api/flyteidl/docs/core/index.rst deleted file mode 100644 index 7d2ce06617..0000000000 --- a/docs/api/flyteidl/docs/core/index.rst +++ /dev/null @@ -1,15 +0,0 @@ -Core Flyte language specification -================================= - -Protocol buffers provide details about core data -structures like :ref:`workflows `, :ref:`tasks `, :ref:`nodes `, and Literals. They are the specifications -of the various entities in Flyte and the type system. - -`Core raw protos `__ - -.. toctree:: - :maxdepth: 1 - :caption: core - :name: coretoc - - core diff --git a/docs/api/flyteidl/docs/datacatalog/datacatalog.rst b/docs/api/flyteidl/docs/datacatalog/datacatalog.rst deleted file mode 100644 index 6ca4328f95..0000000000 --- a/docs/api/flyteidl/docs/datacatalog/datacatalog.rst +++ /dev/null @@ -1,1313 +0,0 @@ -###################### -Protocol Documentation -###################### - - - - -.. _ref_flyteidl/datacatalog/datacatalog.proto: - -flyteidl/datacatalog/datacatalog.proto -================================================================== - - - - - -.. _ref_datacatalog.AddTagRequest: - -AddTagRequest ------------------------------------------------------------------- - -Request message for tagging an Artifact. - - - -.. csv-table:: AddTagRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tag", ":ref:`ref_datacatalog.Tag`", "", "" - - - - - - - -.. _ref_datacatalog.AddTagResponse: - -AddTagResponse ------------------------------------------------------------------- - -Response message for tagging an Artifact. - - - - - - - - -.. _ref_datacatalog.Artifact: - -Artifact ------------------------------------------------------------------- - -Artifact message. It is composed of several string fields. - - - -.. 
csv-table:: Artifact type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_string`", "", "The unique ID of the artifact" - "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "The Dataset that the artifact belongs to" - "data", ":ref:`ref_datacatalog.ArtifactData`", "repeated", "A list of data that is associated with the artifact" - "metadata", ":ref:`ref_datacatalog.Metadata`", "", "Free-form metadata associated with the artifact" - "partitions", ":ref:`ref_datacatalog.Partition`", "repeated", "" - "tags", ":ref:`ref_datacatalog.Tag`", "repeated", "" - "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "creation timestamp of artifact, autogenerated by service" - - - - - - - -.. _ref_datacatalog.ArtifactData: - -ArtifactData ------------------------------------------------------------------- - -ArtifactData that belongs to an artifact - - - -.. csv-table:: ArtifactData type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.Literal`", "", "" - - - - - - - -.. _ref_datacatalog.ArtifactPropertyFilter: - -ArtifactPropertyFilter ------------------------------------------------------------------- - -Artifact properties we can filter by - - - -.. csv-table:: ArtifactPropertyFilter type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "artifact_id", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.CreateArtifactRequest: - -CreateArtifactRequest ------------------------------------------------------------------- - -Request message for creating an Artifact and its associated artifact Data. - - - -.. csv-table:: CreateArtifactRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "artifact", ":ref:`ref_datacatalog.Artifact`", "", "" - - - - - - - -.. 
_ref_datacatalog.CreateArtifactResponse: - -CreateArtifactResponse ------------------------------------------------------------------- - -Response message for creating an Artifact. - - - - - - - - -.. _ref_datacatalog.CreateDatasetRequest: - -CreateDatasetRequest ------------------------------------------------------------------- - -Request message for creating a Dataset. - - - -.. csv-table:: CreateDatasetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset", ":ref:`ref_datacatalog.Dataset`", "", "" - - - - - - - -.. _ref_datacatalog.CreateDatasetResponse: - -CreateDatasetResponse ------------------------------------------------------------------- - -Response message for creating a Dataset - - - - - - - - -.. _ref_datacatalog.Dataset: - -Dataset ------------------------------------------------------------------- - -Dataset message. It is uniquely identified by DatasetID. - - - -.. csv-table:: Dataset type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_datacatalog.DatasetID`", "", "" - "metadata", ":ref:`ref_datacatalog.Metadata`", "", "" - "partitionKeys", ":ref:`ref_string`", "repeated", "" - - - - - - - -.. _ref_datacatalog.DatasetID: - -DatasetID ------------------------------------------------------------------- - -DatasetID message that is composed of several string fields. - - - -.. csv-table:: DatasetID type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "The name of the project" - "name", ":ref:`ref_string`", "", "The name of the dataset" - "domain", ":ref:`ref_string`", "", "The domain (eg. environment)" - "version", ":ref:`ref_string`", "", "Version of the data schema" - "UUID", ":ref:`ref_string`", "", "UUID for the dataset (if set the above fields are optional)" - - - - - - - -.. 
_ref_datacatalog.DatasetPropertyFilter: - -DatasetPropertyFilter ------------------------------------------------------------------- - -Dataset properties we can filter by - - - -.. csv-table:: DatasetPropertyFilter type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "" - "name", ":ref:`ref_string`", "", "" - "domain", ":ref:`ref_string`", "", "" - "version", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.FilterExpression: - -FilterExpression ------------------------------------------------------------------- - -Filter expression that is composed of a combination of single filters - - - -.. csv-table:: FilterExpression type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "filters", ":ref:`ref_datacatalog.SinglePropertyFilter`", "repeated", "" - - - - - - - -.. _ref_datacatalog.GetArtifactRequest: - -GetArtifactRequest ------------------------------------------------------------------- - -Request message for retrieving an Artifact. Retrieve an artifact based on a query handle that -can be one of artifact_id or tag. The result returned will include the artifact data and metadata -associated with the artifact. - - - -.. csv-table:: GetArtifactRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "" - "artifact_id", ":ref:`ref_string`", "", "" - "tag_name", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.GetArtifactResponse: - -GetArtifactResponse ------------------------------------------------------------------- - -Response message for retrieving an Artifact. The result returned will include the artifact data -and metadata associated with the artifact. - - - -.. csv-table:: GetArtifactResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "artifact", ":ref:`ref_datacatalog.Artifact`", "", "" - - - - - - - -.. 
_ref_datacatalog.GetDatasetRequest: - -GetDatasetRequest ------------------------------------------------------------------- - -Request message for retrieving a Dataset. The Dataset is retrieved by it's unique identifier -which is a combination of several fields. - - - -.. csv-table:: GetDatasetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "" - - - - - - - -.. _ref_datacatalog.GetDatasetResponse: - -GetDatasetResponse ------------------------------------------------------------------- - -Response message for retrieving a Dataset. The response will include the metadata for the -Dataset. - - - -.. csv-table:: GetDatasetResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset", ":ref:`ref_datacatalog.Dataset`", "", "" - - - - - - - -.. _ref_datacatalog.GetOrExtendReservationRequest: - -GetOrExtendReservationRequest ------------------------------------------------------------------- - -Try to acquire or extend an artifact reservation. If an active reservation exists, retrieve that instance. - - - -.. csv-table:: GetOrExtendReservationRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "reservation_id", ":ref:`ref_datacatalog.ReservationID`", "", "" - "owner_id", ":ref:`ref_string`", "", "" - "heartbeat_interval", ":ref:`ref_google.protobuf.Duration`", "", "Requested reservation extension heartbeat interval" - - - - - - - -.. _ref_datacatalog.GetOrExtendReservationResponse: - -GetOrExtendReservationResponse ------------------------------------------------------------------- - -Response including either a newly minted reservation or the existing reservation - - - -.. csv-table:: GetOrExtendReservationResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "reservation", ":ref:`ref_datacatalog.Reservation`", "", "" - - - - - - - -.. 
_ref_datacatalog.KeyValuePair: - -KeyValuePair ------------------------------------------------------------------- - - - - - -.. csv-table:: KeyValuePair type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.ListArtifactsRequest: - -ListArtifactsRequest ------------------------------------------------------------------- - -List the artifacts that belong to the Dataset, optionally filtered using filtered expression. - - - -.. csv-table:: ListArtifactsRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "Use a datasetID for which you want to retrieve the artifacts" - "filter", ":ref:`ref_datacatalog.FilterExpression`", "", "Apply the filter expression to this query" - "pagination", ":ref:`ref_datacatalog.PaginationOptions`", "", "Pagination options to get a page of artifacts" - - - - - - - -.. _ref_datacatalog.ListArtifactsResponse: - -ListArtifactsResponse ------------------------------------------------------------------- - -Response to list artifacts - - - -.. csv-table:: ListArtifactsResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "artifacts", ":ref:`ref_datacatalog.Artifact`", "repeated", "The list of artifacts" - "next_token", ":ref:`ref_string`", "", "Token to use to request the next page, pass this into the next requests PaginationOptions" - - - - - - - -.. _ref_datacatalog.ListDatasetsRequest: - -ListDatasetsRequest ------------------------------------------------------------------- - -List the datasets for the given query - - - -.. 
csv-table:: ListDatasetsRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "filter", ":ref:`ref_datacatalog.FilterExpression`", "", "Apply the filter expression to this query" - "pagination", ":ref:`ref_datacatalog.PaginationOptions`", "", "Pagination options to get a page of datasets" - - - - - - - -.. _ref_datacatalog.ListDatasetsResponse: - -ListDatasetsResponse ------------------------------------------------------------------- - -List the datasets response with token for next pagination - - - -.. csv-table:: ListDatasetsResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "datasets", ":ref:`ref_datacatalog.Dataset`", "repeated", "The list of datasets" - "next_token", ":ref:`ref_string`", "", "Token to use to request the next page, pass this into the next requests PaginationOptions" - - - - - - - -.. _ref_datacatalog.Metadata: - -Metadata ------------------------------------------------------------------- - -Metadata representation for artifacts and datasets - - - -.. csv-table:: Metadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key_map", ":ref:`ref_datacatalog.Metadata.KeyMapEntry`", "repeated", "key map is a dictionary of key/val strings that represent metadata" - - - - - - - -.. _ref_datacatalog.Metadata.KeyMapEntry: - -Metadata.KeyMapEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: Metadata.KeyMapEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.PaginationOptions: - -PaginationOptions ------------------------------------------------------------------- - -Pagination options for making list requests - - - -.. 
csv-table:: PaginationOptions type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "limit", ":ref:`ref_uint32`", "", "the max number of results to return" - "token", ":ref:`ref_string`", "", "the token to pass to fetch the next page" - "sortKey", ":ref:`ref_datacatalog.PaginationOptions.SortKey`", "", "the property that we want to sort the results by" - "sortOrder", ":ref:`ref_datacatalog.PaginationOptions.SortOrder`", "", "the sort order of the results" - - - - - - - -.. _ref_datacatalog.Partition: - -Partition ------------------------------------------------------------------- - -An artifact could have multiple partitions and each partition can have an arbitrary string key/value pair - - - -.. csv-table:: Partition type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.PartitionPropertyFilter: - -PartitionPropertyFilter ------------------------------------------------------------------- - -Partition properties we can filter by - - - -.. csv-table:: PartitionPropertyFilter type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key_val", ":ref:`ref_datacatalog.KeyValuePair`", "", "" - - - - - - - -.. _ref_datacatalog.ReleaseReservationRequest: - -ReleaseReservationRequest ------------------------------------------------------------------- - -Request to release reservation - - - -.. csv-table:: ReleaseReservationRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "reservation_id", ":ref:`ref_datacatalog.ReservationID`", "", "" - "owner_id", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.ReleaseReservationResponse: - -ReleaseReservationResponse ------------------------------------------------------------------- - -Response to release reservation - - - - - - - - -.. 
_ref_datacatalog.Reservation: - -Reservation ------------------------------------------------------------------- - -A reservation including owner, heartbeat interval, expiration timestamp, and various metadata. - - - -.. csv-table:: Reservation type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "reservation_id", ":ref:`ref_datacatalog.ReservationID`", "", "" - "owner_id", ":ref:`ref_string`", "", "" - "heartbeat_interval", ":ref:`ref_google.protobuf.Duration`", "", "Recommended heartbeat interval to extend reservation" - "expires_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Expiration timestamp of this reservation" - "metadata", ":ref:`ref_datacatalog.Metadata`", "", "" - - - - - - - -.. _ref_datacatalog.ReservationID: - -ReservationID ------------------------------------------------------------------- - -ReservationID message that is composed of several string fields. - - - -.. csv-table:: ReservationID type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset_id", ":ref:`ref_datacatalog.DatasetID`", "", "" - "tag_name", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.SinglePropertyFilter: - -SinglePropertyFilter ------------------------------------------------------------------- - -A single property to filter on. - - - -.. csv-table:: SinglePropertyFilter type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tag_filter", ":ref:`ref_datacatalog.TagPropertyFilter`", "", "" - "partition_filter", ":ref:`ref_datacatalog.PartitionPropertyFilter`", "", "" - "artifact_filter", ":ref:`ref_datacatalog.ArtifactPropertyFilter`", "", "" - "dataset_filter", ":ref:`ref_datacatalog.DatasetPropertyFilter`", "", "" - "operator", ":ref:`ref_datacatalog.SinglePropertyFilter.ComparisonOperator`", "", "field 10 in case we add more entities to query" - - - - - - - -.. 
_ref_datacatalog.Tag: - -Tag ------------------------------------------------------------------- - -Tag message that is unique to a Dataset. It is associated to a single artifact and -can be retrieved by name later. - - - -.. csv-table:: Tag type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_string`", "", "Name of tag" - "artifact_id", ":ref:`ref_string`", "", "The tagged artifact" - "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "The Dataset that this tag belongs to" - - - - - - - -.. _ref_datacatalog.TagPropertyFilter: - -TagPropertyFilter ------------------------------------------------------------------- - -Tag properties we can filter by - - - -.. csv-table:: TagPropertyFilter type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tag_name", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.UpdateArtifactRequest: - -UpdateArtifactRequest ------------------------------------------------------------------- - -Request message for updating an Artifact and overwriting its associated ArtifactData. - - - -.. csv-table:: UpdateArtifactRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "ID of dataset the artifact is associated with" - "artifact_id", ":ref:`ref_string`", "", "" - "tag_name", ":ref:`ref_string`", "", "" - "data", ":ref:`ref_datacatalog.ArtifactData`", "repeated", "List of data to overwrite stored artifact data with. Must contain ALL data for updated Artifact as any missing ArtifactData entries will be removed from the underlying blob storage and database." - - - - - - - -.. _ref_datacatalog.UpdateArtifactResponse: - -UpdateArtifactResponse ------------------------------------------------------------------- - -Response message for updating an Artifact. - - - -.. 
csv-table:: UpdateArtifactResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "artifact_id", ":ref:`ref_string`", "", "The unique ID of the artifact updated" - - - - - - -.. - end messages - - - -.. _ref_datacatalog.PaginationOptions.SortKey: - -PaginationOptions.SortKey ------------------------------------------------------------------- - - - -.. csv-table:: Enum PaginationOptions.SortKey values - :header: "Name", "Number", "Description" - :widths: auto - - "CREATION_TIME", "0", "" - - - -.. _ref_datacatalog.PaginationOptions.SortOrder: - -PaginationOptions.SortOrder ------------------------------------------------------------------- - - - -.. csv-table:: Enum PaginationOptions.SortOrder values - :header: "Name", "Number", "Description" - :widths: auto - - "DESCENDING", "0", "" - "ASCENDING", "1", "" - - - -.. _ref_datacatalog.SinglePropertyFilter.ComparisonOperator: - -SinglePropertyFilter.ComparisonOperator ------------------------------------------------------------------- - -as use-cases come up we can add more operators, ex: gte, like, not eq etc. - -.. csv-table:: Enum SinglePropertyFilter.ComparisonOperator values - :header: "Name", "Number", "Description" - :widths: auto - - "EQUALS", "0", "" - - -.. - end enums - - -.. - end HasExtensions - - - -.. _ref_datacatalog.DataCatalog: - -DataCatalog ------------------------------------------------------------------- - -Data Catalog service definition -Data Catalog is a service for indexing parameterized, strongly-typed data artifacts across revisions. -Artifacts are associated with a Dataset, and can be tagged for retrieval. - -.. csv-table:: DataCatalog service methods - :header: "Method Name", "Request Type", "Response Type", "Description" - :widths: auto - - "CreateDataset", ":ref:`ref_datacatalog.CreateDatasetRequest`", ":ref:`ref_datacatalog.CreateDatasetResponse`", "Create a new Dataset. Datasets are unique based on the DatasetID. 
Datasets are logical groupings of artifacts. Each dataset can have one or more artifacts" - "GetDataset", ":ref:`ref_datacatalog.GetDatasetRequest`", ":ref:`ref_datacatalog.GetDatasetResponse`", "Get a Dataset by the DatasetID. This returns the Dataset with the associated metadata." - "CreateArtifact", ":ref:`ref_datacatalog.CreateArtifactRequest`", ":ref:`ref_datacatalog.CreateArtifactResponse`", "Create an artifact and the artifact data associated with it. An artifact can be a hive partition or arbitrary files or data values" - "GetArtifact", ":ref:`ref_datacatalog.GetArtifactRequest`", ":ref:`ref_datacatalog.GetArtifactResponse`", "Retrieve an artifact by an identifying handle. This returns an artifact along with the artifact data." - "AddTag", ":ref:`ref_datacatalog.AddTagRequest`", ":ref:`ref_datacatalog.AddTagResponse`", "Associate a tag with an artifact. Tags are unique within a Dataset." - "ListArtifacts", ":ref:`ref_datacatalog.ListArtifactsRequest`", ":ref:`ref_datacatalog.ListArtifactsResponse`", "Return a paginated list of artifacts" - "ListDatasets", ":ref:`ref_datacatalog.ListDatasetsRequest`", ":ref:`ref_datacatalog.ListDatasetsResponse`", "Return a paginated list of datasets" - "UpdateArtifact", ":ref:`ref_datacatalog.UpdateArtifactRequest`", ":ref:`ref_datacatalog.UpdateArtifactResponse`", "Updates an existing artifact, overwriting the stored artifact data in the underlying blob storage." - "GetOrExtendReservation", ":ref:`ref_datacatalog.GetOrExtendReservationRequest`", ":ref:`ref_datacatalog.GetOrExtendReservationResponse`", "Attempts to get or extend a reservation for the corresponding artifact. If one already exists (ie. another entity owns the reservation) then that reservation is retrieved. Once you acquire a reservation, you need to periodically extend the reservation with an identical call. If the reservation is not extended before the defined expiration, it may be acquired by another task. 
Note: We may have multiple concurrent tasks with the same signature and the same input that try to populate the same artifact at the same time. Thus with reservation, only one task can run at a time, until the reservation expires. Note: If task A does not extend the reservation in time and the reservation expires, another task B may take over the reservation, resulting in two tasks A and B running in parallel. So a third task C may get the Artifact from A or B, whichever writes last." - "ReleaseReservation", ":ref:`ref_datacatalog.ReleaseReservationRequest`", ":ref:`ref_datacatalog.ReleaseReservationResponse`", "Release the reservation when the task holding the spot fails so that the other tasks can grab the spot." - -.. - end services - - - - -.. _ref_google/protobuf/timestamp.proto: - -google/protobuf/timestamp.proto -================================================================== - - - - - -.. _ref_google.protobuf.Timestamp: - -Timestamp ------------------------------------------------------------------- - -A Timestamp represents a point in time independent of any time zone or local -calendar, encoded as a count of seconds and fractions of seconds at -nanosecond resolution. The count is relative to an epoch at UTC midnight on -January 1, 1970, in the proleptic Gregorian calendar which extends the -Gregorian calendar backwards to year one. - -All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap -second table is needed for interpretation, using a [24-hour linear -smear](https://developers.google.com/time/smear). - -The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By -restricting to that range, we ensure that we can convert to and from [RFC -3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. - -# Examples - -Example 1: Compute Timestamp from POSIX `time()`. - - Timestamp timestamp; - timestamp.set_seconds(time(NULL)); - timestamp.set_nanos(0); - -Example 2: Compute Timestamp from POSIX `gettimeofday()`. 
- - struct timeval tv; - gettimeofday(&tv, NULL); - - Timestamp timestamp; - timestamp.set_seconds(tv.tv_sec); - timestamp.set_nanos(tv.tv_usec * 1000); - -Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. - - FILETIME ft; - GetSystemTimeAsFileTime(&ft); - UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; - - // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z - // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. - Timestamp timestamp; - timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); - timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); - -Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. - - long millis = System.currentTimeMillis(); - - Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) - .setNanos((int) ((millis % 1000) * 1000000)).build(); - -Example 5: Compute Timestamp from Java `Instant.now()`. - - Instant now = Instant.now(); - - Timestamp timestamp = - Timestamp.newBuilder().setSeconds(now.getEpochSecond()) - .setNanos(now.getNano()).build(); - -Example 6: Compute Timestamp from current time in Python. - - timestamp = Timestamp() - timestamp.GetCurrentTime() - -# JSON Mapping - -In JSON format, the Timestamp type is encoded as a string in the -[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the -format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" -where {year} is always expressed using four digits while {month}, {day}, -{hour}, {min}, and {sec} are zero-padded to two digits each. The fractional -seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), -are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone -is required. A proto3 JSON serializer should always use UTC (as indicated by -"Z") when printing the Timestamp type and a proto3 JSON parser should be -able to accept both UTC and other timezones (as indicated by an offset). 
- -For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past -01:30 UTC on January 15, 2017. - -In JavaScript, one can convert a Date object to this format using the -standard -[toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) -method. In Python, a standard `datetime.datetime` object can be converted -to this format using -[`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with -the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use -the Joda Time's [`ISODateTimeFormat.dateTime()`]( -http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D -) to obtain a formatter capable of generating timestamps in this format. - - - -.. csv-table:: Timestamp type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive." - "nanos", ":ref:`ref_int32`", "", "Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_google/protobuf/duration.proto: - -google/protobuf/duration.proto -================================================================== - - - - - -.. _ref_google.protobuf.Duration: - -Duration ------------------------------------------------------------------- - -A Duration represents a signed, fixed-length span of time represented -as a count of seconds and fractions of seconds at nanosecond -resolution. It is independent of any calendar and concepts like "day" -or "month". 
It is related to Timestamp in that the difference between -two Timestamp values is a Duration and it can be added or subtracted -from a Timestamp. Range is approximately +-10,000 years. - -# Examples - -Example 1: Compute Duration from two Timestamps in pseudo code. - - Timestamp start = ...; - Timestamp end = ...; - Duration duration = ...; - - duration.seconds = end.seconds - start.seconds; - duration.nanos = end.nanos - start.nanos; - - if (duration.seconds < 0 && duration.nanos > 0) { - duration.seconds += 1; - duration.nanos -= 1000000000; - } else if (duration.seconds > 0 && duration.nanos < 0) { - duration.seconds -= 1; - duration.nanos += 1000000000; - } - -Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. - - Timestamp start = ...; - Duration duration = ...; - Timestamp end = ...; - - end.seconds = start.seconds + duration.seconds; - end.nanos = start.nanos + duration.nanos; - - if (end.nanos < 0) { - end.seconds -= 1; - end.nanos += 1000000000; - } else if (end.nanos >= 1000000000) { - end.seconds += 1; - end.nanos -= 1000000000; - } - -Example 3: Compute Duration from datetime.timedelta in Python. - - td = datetime.timedelta(days=3, minutes=10) - duration = Duration() - duration.FromTimedelta(td) - -# JSON Mapping - -In JSON format, the Duration type is encoded as a string rather than an -object, where the string ends in the suffix "s" (indicating seconds) and -is preceded by the number of seconds, with nanoseconds expressed as -fractional seconds. For example, 3 seconds with 0 nanoseconds should be -encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -microsecond should be expressed in JSON format as "3.000001s". - - - -.. csv-table:: Duration type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. 
Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years" - "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_google/protobuf/struct.proto: - -google/protobuf/struct.proto -================================================================== - - - - - -.. _ref_google.protobuf.ListValue: - -ListValue ------------------------------------------------------------------- - -`ListValue` is a wrapper around a repeated field of values. - -The JSON representation for `ListValue` is JSON array. - - - -.. csv-table:: ListValue type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_google.protobuf.Value`", "repeated", "Repeated field of dynamically typed values." - - - - - - - -.. _ref_google.protobuf.Struct: - -Struct ------------------------------------------------------------------- - -`Struct` represents a structured data value, consisting of fields -which map to dynamically typed values. In some languages, `Struct` -might be supported by a native representation. For example, in -scripting languages like JS a struct is represented as an -object. The details of that representation are described together -with the proto support for the language. - -The JSON representation for `Struct` is JSON object. - - - -.. 
csv-table:: Struct type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "fields", ":ref:`ref_google.protobuf.Struct.FieldsEntry`", "repeated", "Unordered map of dynamically typed values." - - - - - - - -.. _ref_google.protobuf.Struct.FieldsEntry: - -Struct.FieldsEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: Struct.FieldsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_google.protobuf.Value`", "", "" - - - - - - - -.. _ref_google.protobuf.Value: - -Value ------------------------------------------------------------------- - -`Value` represents a dynamically typed value which can be either -null, a number, a string, a boolean, a recursive struct value, or a -list of values. A producer of value is expected to set one of these -variants. Absence of any variant indicates an error. - -The JSON representation for `Value` is JSON value. - - - -.. csv-table:: Value type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "null_value", ":ref:`ref_google.protobuf.NullValue`", "", "Represents a null value." - "number_value", ":ref:`ref_double`", "", "Represents a double value." - "string_value", ":ref:`ref_string`", "", "Represents a string value." - "bool_value", ":ref:`ref_bool`", "", "Represents a boolean value." - "struct_value", ":ref:`ref_google.protobuf.Struct`", "", "Represents a structured value." - "list_value", ":ref:`ref_google.protobuf.ListValue`", "", "Represents a repeated `Value`." - - - - - - -.. - end messages - - - -.. _ref_google.protobuf.NullValue: - -NullValue ------------------------------------------------------------------- - -`NullValue` is a singleton enumeration to represent the null value for the -`Value` type union. - - The JSON representation for `NullValue` is JSON `null`. - -.. 
csv-table:: Enum NullValue values - :header: "Name", "Number", "Description" - :widths: auto - - "NULL_VALUE", "0", "Null value." - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - diff --git a/docs/api/flyteidl/docs/datacatalog/index.rst b/docs/api/flyteidl/docs/datacatalog/index.rst deleted file mode 100644 index d64c2ddd9f..0000000000 --- a/docs/api/flyteidl/docs/datacatalog/index.rst +++ /dev/null @@ -1,16 +0,0 @@ -Flyte Data Catalog Service -============================ - -Protos provides the interface definition for the Data Catalog Service. Data Catalog is a service to -index parameterized, strongly-typed data artifacts across revisions. It is used in the Flyte ecosystem -to catalog artifacts generated by the task executions. The output generated by a task can be stored as artifact -data and tagged by the user so as to be retrieved later by that tag. - -`Datacatalog raw proto `__ - -.. toctree:: - :maxdepth: 1 - :caption: datacatalog - :name: datacatalogtoc - - datacatalog diff --git a/docs/api/flyteidl/docs/event/event.rst b/docs/api/flyteidl/docs/event/event.rst deleted file mode 100644 index df0a3b2e8b..0000000000 --- a/docs/api/flyteidl/docs/event/event.rst +++ /dev/null @@ -1,726 +0,0 @@ -###################### -Protocol Documentation -###################### - - - - -.. _ref_flyteidl/event/event.proto: - -flyteidl/event/event.proto -================================================================== - - - - - -.. _ref_flyteidl.event.DynamicWorkflowNodeMetadata: - -DynamicWorkflowNodeMetadata ------------------------------------------------------------------- - -For dynamic workflow nodes we send information about the dynamic workflow definition that gets generated. - - - -.. csv-table:: DynamicWorkflowNodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow." 
- "compiled_workflow", ":ref:`ref_flyteidl.core.CompiledWorkflowClosure`", "", "Represents the compiled representation of the embedded dynamic workflow." - - - - - - - -.. _ref_flyteidl.event.ExternalResourceInfo: - -ExternalResourceInfo ------------------------------------------------------------------- - -This message contains metadata about external resources produced or used by a specific task execution. - - - -.. csv-table:: ExternalResourceInfo type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "external_id", ":ref:`ref_string`", "", "Identifier for an external resource created by this task execution, for example Qubole query ID or presto query ids." - "index", ":ref:`ref_uint32`", "", "A unique index for the external resource with respect to all external resources for this task. Although the identifier may change between task reporting events or retries, this will remain the same to enable aggregating information from multiple reports." - "retry_attempt", ":ref:`ref_uint32`", "", "Retry attempt number for this external resource, ie., 2 for the second attempt" - "phase", ":ref:`ref_flyteidl.core.TaskExecution.Phase`", "", "Phase associated with the external resource" - "cache_status", ":ref:`ref_flyteidl.core.CatalogCacheStatus`", "", "Captures the status of caching for this external resource execution." - "logs", ":ref:`ref_flyteidl.core.TaskLog`", "repeated", "log information for the external resource execution" - - - - - - - -.. _ref_flyteidl.event.NodeExecutionEvent: - -NodeExecutionEvent ------------------------------------------------------------------- - - - - - -.. 
csv-table:: NodeExecutionEvent type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Unique identifier for this node execution" - "producer_id", ":ref:`ref_string`", "", "the id of the originator (Propeller) of the event" - "phase", ":ref:`ref_flyteidl.core.NodeExecution.Phase`", "", "" - "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the original event occurred, it is generated by the executor of the node." - "input_uri", ":ref:`ref_string`", "", "" - "output_uri", ":ref:`ref_string`", "", "URL to the output of the execution, it encodes all the information including Cloud source provider. ie., s3://..." - "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the execution" - "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Raw output data produced by this node execution." - "workflow_node_metadata", ":ref:`ref_flyteidl.event.WorkflowNodeMetadata`", "", "" - "task_node_metadata", ":ref:`ref_flyteidl.event.TaskNodeMetadata`", "", "" - "parent_task_metadata", ":ref:`ref_flyteidl.event.ParentTaskExecutionMetadata`", "", "[To be deprecated] Specifies which task (if any) launched this node." - "parent_node_metadata", ":ref:`ref_flyteidl.event.ParentNodeExecutionMetadata`", "", "Specifies the parent node of the current node execution. Node executions at level zero will not have a parent node." - "retry_group", ":ref:`ref_string`", "", "Retry group to indicate grouping of nodes by retries" - "spec_node_id", ":ref:`ref_string`", "", "Identifier of the node in the original workflow/graph This maps to value of WorkflowTemplate.nodes[X].id" - "node_name", ":ref:`ref_string`", "", "Friendly readable name for the node" - "event_version", ":ref:`ref_int32`", "", "" - "is_parent", ":ref:`ref_bool`", "", "Whether this node launched a subworkflow." 
- "is_dynamic", ":ref:`ref_bool`", "", "Whether this node yielded a dynamic workflow." - "deck_uri", ":ref:`ref_string`", "", "String location uniquely identifying where the deck HTML file is NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar)" - - - - - - - -.. _ref_flyteidl.event.ParentNodeExecutionMetadata: - -ParentNodeExecutionMetadata ------------------------------------------------------------------- - - - - - -.. csv-table:: ParentNodeExecutionMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "node_id", ":ref:`ref_string`", "", "Unique identifier of the parent node id within the execution This is value of core.NodeExecutionIdentifier.node_id of the parent node" - - - - - - - -.. _ref_flyteidl.event.ParentTaskExecutionMetadata: - -ParentTaskExecutionMetadata ------------------------------------------------------------------- - - - - - -.. csv-table:: ParentTaskExecutionMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "" - - - - - - - -.. _ref_flyteidl.event.ResourcePoolInfo: - -ResourcePoolInfo ------------------------------------------------------------------- - -This message holds task execution metadata specific to resource allocation used to manage concurrent -executions for a project namespace. - - - -.. csv-table:: ResourcePoolInfo type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "allocation_token", ":ref:`ref_string`", "", "Unique resource ID used to identify this execution when allocating a token." - "namespace", ":ref:`ref_string`", "", "Namespace under which this task execution requested an allocation token." - - - - - - - -.. _ref_flyteidl.event.TaskExecutionEvent: - -TaskExecutionEvent ------------------------------------------------------------------- - -Plugin specific execution event information. 
For tasks like Python, Hive, Spark, DynamicJob. - - - -.. csv-table:: TaskExecutionEvent type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "task_id", ":ref:`ref_flyteidl.core.Identifier`", "", "ID of the task. In combination with the retryAttempt this will indicate the task execution uniquely for a given parent node execution." - "parent_node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "A task execution is always kicked off by a node execution, the event consumer will use the parent_id to relate the task to it's parent node execution" - "retry_attempt", ":ref:`ref_uint32`", "", "retry attempt number for this task, ie., 2 for the second attempt" - "phase", ":ref:`ref_flyteidl.core.TaskExecution.Phase`", "", "Phase associated with the event" - "producer_id", ":ref:`ref_string`", "", "id of the process that sent this event, mainly for trace debugging" - "logs", ":ref:`ref_flyteidl.core.TaskLog`", "repeated", "log information for the task execution" - "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the original event occurred, it is generated by the executor of the task." - "input_uri", ":ref:`ref_string`", "", "URI of the input file, it encodes all the information including Cloud source provider. ie., s3://..." - "output_uri", ":ref:`ref_string`", "", "URI to the output of the execution, it will be in a format that encodes all the information including Cloud source provider. ie., s3://..." - "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the execution" - "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Raw output data produced by this task execution." - "custom_info", ":ref:`ref_google.protobuf.Struct`", "", "Custom data that the task plugin sends back. This is extensible to allow various plugins in the system." 
- "phase_version", ":ref:`ref_uint32`", "", "Some phases, like RUNNING, can send multiple events with changed metadata (new logs, additional custom_info, etc) that should be recorded regardless of the lack of phase change. The version field should be incremented when metadata changes across the duration of an individual phase." - "reason", ":ref:`ref_string`", "", "An optional explanation for the phase transition." - "task_type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier. If the task definition is already registered in flyte admin this type will be identical, but not all task executions necessarily use pre-registered definitions and this type is useful to render the task in the UI, filter task executions, etc." - "metadata", ":ref:`ref_flyteidl.event.TaskExecutionMetadata`", "", "Metadata around how a task was executed." - "event_version", ":ref:`ref_int32`", "", "The event version is used to indicate versioned changes in how data is reported using this proto message. For example, event_verison > 0 means that maps tasks report logs using the TaskExecutionMetadata ExternalResourceInfo fields for each subtask rather than the TaskLog in this message." - - - - - - - -.. _ref_flyteidl.event.TaskExecutionMetadata: - -TaskExecutionMetadata ------------------------------------------------------------------- - -Holds metadata around how a task was executed. -As a task transitions across event phases during execution some attributes, such its generated name, generated external resources, -and more may grow in size but not change necessarily based on the phase transition that sparked the event update. -Metadata is a container for these attributes across the task execution lifecycle. - - - -.. csv-table:: TaskExecutionMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "generated_name", ":ref:`ref_string`", "", "Unique, generated name for this task execution used by the backend." 
- "external_resources", ":ref:`ref_flyteidl.event.ExternalResourceInfo`", "repeated", "Additional data on external resources on other back-ends or platforms (e.g. Hive, Qubole, etc) launched by this task execution." - "resource_pool_info", ":ref:`ref_flyteidl.event.ResourcePoolInfo`", "repeated", "Includes additional data on concurrent resource management used during execution.. This is a repeated field because a plugin can request multiple resource allocations during execution." - "plugin_identifier", ":ref:`ref_string`", "", "The identifier of the plugin used to execute this task." - "instance_class", ":ref:`ref_flyteidl.event.TaskExecutionMetadata.InstanceClass`", "", "" - - - - - - - -.. _ref_flyteidl.event.TaskNodeMetadata: - -TaskNodeMetadata ------------------------------------------------------------------- - - - - - -.. csv-table:: TaskNodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cache_status", ":ref:`ref_flyteidl.core.CatalogCacheStatus`", "", "Captures the status of caching for this execution." - "catalog_key", ":ref:`ref_flyteidl.core.CatalogMetadata`", "", "This structure carries the catalog artifact information" - "reservation_status", ":ref:`ref_flyteidl.core.CatalogReservation.Status`", "", "Captures the status of cache reservations for this execution." - "checkpoint_uri", ":ref:`ref_string`", "", "The latest checkpoint location" - "dynamic_workflow", ":ref:`ref_flyteidl.event.DynamicWorkflowNodeMetadata`", "", "In the case this task launched a dynamic workflow we capture its structure here." - - - - - - - -.. _ref_flyteidl.event.WorkflowExecutionEvent: - -WorkflowExecutionEvent ------------------------------------------------------------------- - - - - - -.. 
csv-table:: WorkflowExecutionEvent type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Workflow execution id" - "producer_id", ":ref:`ref_string`", "", "the id of the originator (Propeller) of the event" - "phase", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "", "" - "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the original event occurred, it is generated by the executor of the workflow." - "output_uri", ":ref:`ref_string`", "", "URL to the output of the execution, it encodes all the information including Cloud source provider. ie., s3://..." - "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the execution" - "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Raw output data produced by this workflow execution." - - - - - - - -.. _ref_flyteidl.event.WorkflowNodeMetadata: - -WorkflowNodeMetadata ------------------------------------------------------------------- - -For Workflow Nodes we need to send information about the workflow that's launched - - - -.. csv-table:: WorkflowNodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "" - - - - - - -.. - end messages - - - -.. _ref_flyteidl.event.TaskExecutionMetadata.InstanceClass: - -TaskExecutionMetadata.InstanceClass ------------------------------------------------------------------- - -Includes the broad category of machine used for this specific task execution. - -.. csv-table:: Enum TaskExecutionMetadata.InstanceClass values - :header: "Name", "Number", "Description" - :widths: auto - - "DEFAULT", "0", "The default instance class configured for the flyte application platform." - "INTERRUPTIBLE", "1", "The instance class configured for interruptible tasks." - - -.. - end enums - - -.. 
- end HasExtensions - - -.. - end services - - - - -.. _ref_google/protobuf/timestamp.proto: - -google/protobuf/timestamp.proto -================================================================== - - - - - -.. _ref_google.protobuf.Timestamp: - -Timestamp ------------------------------------------------------------------- - -A Timestamp represents a point in time independent of any time zone or local -calendar, encoded as a count of seconds and fractions of seconds at -nanosecond resolution. The count is relative to an epoch at UTC midnight on -January 1, 1970, in the proleptic Gregorian calendar which extends the -Gregorian calendar backwards to year one. - -All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap -second table is needed for interpretation, using a [24-hour linear -smear](https://developers.google.com/time/smear). - -The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By -restricting to that range, we ensure that we can convert to and from [RFC -3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. - -# Examples - -Example 1: Compute Timestamp from POSIX `time()`. - - Timestamp timestamp; - timestamp.set_seconds(time(NULL)); - timestamp.set_nanos(0); - -Example 2: Compute Timestamp from POSIX `gettimeofday()`. - - struct timeval tv; - gettimeofday(&tv, NULL); - - Timestamp timestamp; - timestamp.set_seconds(tv.tv_sec); - timestamp.set_nanos(tv.tv_usec * 1000); - -Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. - - FILETIME ft; - GetSystemTimeAsFileTime(&ft); - UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; - - // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z - // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. 
- Timestamp timestamp; - timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); - timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); - -Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. - - long millis = System.currentTimeMillis(); - - Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) - .setNanos((int) ((millis % 1000) * 1000000)).build(); - -Example 5: Compute Timestamp from Java `Instant.now()`. - - Instant now = Instant.now(); - - Timestamp timestamp = - Timestamp.newBuilder().setSeconds(now.getEpochSecond()) - .setNanos(now.getNano()).build(); - -Example 6: Compute Timestamp from current time in Python. - - timestamp = Timestamp() - timestamp.GetCurrentTime() - -# JSON Mapping - -In JSON format, the Timestamp type is encoded as a string in the -[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the -format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" -where {year} is always expressed using four digits while {month}, {day}, -{hour}, {min}, and {sec} are zero-padded to two digits each. The fractional -seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), -are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone -is required. A proto3 JSON serializer should always use UTC (as indicated by -"Z") when printing the Timestamp type and a proto3 JSON parser should be -able to accept both UTC and other timezones (as indicated by an offset). - -For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past -01:30 UTC on January 15, 2017. - -In JavaScript, one can convert a Date object to this format using the -standard -[toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) -method. 
In Python, a standard `datetime.datetime` object can be converted -to this format using -[`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with -the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use -the Joda Time's [`ISODateTimeFormat.dateTime()`]( -http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D -) to obtain a formatter capable of generating timestamps in this format. - - - -.. csv-table:: Timestamp type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive." - "nanos", ":ref:`ref_int32`", "", "Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_google/protobuf/duration.proto: - -google/protobuf/duration.proto -================================================================== - - - - - -.. _ref_google.protobuf.Duration: - -Duration ------------------------------------------------------------------- - -A Duration represents a signed, fixed-length span of time represented -as a count of seconds and fractions of seconds at nanosecond -resolution. It is independent of any calendar and concepts like "day" -or "month". It is related to Timestamp in that the difference between -two Timestamp values is a Duration and it can be added or subtracted -from a Timestamp. Range is approximately +-10,000 years. - -# Examples - -Example 1: Compute Duration from two Timestamps in pseudo code. 
- - Timestamp start = ...; - Timestamp end = ...; - Duration duration = ...; - - duration.seconds = end.seconds - start.seconds; - duration.nanos = end.nanos - start.nanos; - - if (duration.seconds < 0 && duration.nanos > 0) { - duration.seconds += 1; - duration.nanos -= 1000000000; - } else if (duration.seconds > 0 && duration.nanos < 0) { - duration.seconds -= 1; - duration.nanos += 1000000000; - } - -Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. - - Timestamp start = ...; - Duration duration = ...; - Timestamp end = ...; - - end.seconds = start.seconds + duration.seconds; - end.nanos = start.nanos + duration.nanos; - - if (end.nanos < 0) { - end.seconds -= 1; - end.nanos += 1000000000; - } else if (end.nanos >= 1000000000) { - end.seconds += 1; - end.nanos -= 1000000000; - } - -Example 3: Compute Duration from datetime.timedelta in Python. - - td = datetime.timedelta(days=3, minutes=10) - duration = Duration() - duration.FromTimedelta(td) - -# JSON Mapping - -In JSON format, the Duration type is encoded as a string rather than an -object, where the string ends in the suffix "s" (indicating seconds) and -is preceded by the number of seconds, with nanoseconds expressed as -fractional seconds. For example, 3 seconds with 0 nanoseconds should be -encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -microsecond should be expressed in JSON format as "3.000001s". - - - -.. csv-table:: Duration type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years" - "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. 
Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_google/protobuf/struct.proto: - -google/protobuf/struct.proto -================================================================== - - - - - -.. _ref_google.protobuf.ListValue: - -ListValue ------------------------------------------------------------------- - -`ListValue` is a wrapper around a repeated field of values. - -The JSON representation for `ListValue` is JSON array. - - - -.. csv-table:: ListValue type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_google.protobuf.Value`", "repeated", "Repeated field of dynamically typed values." - - - - - - - -.. _ref_google.protobuf.Struct: - -Struct ------------------------------------------------------------------- - -`Struct` represents a structured data value, consisting of fields -which map to dynamically typed values. In some languages, `Struct` -might be supported by a native representation. For example, in -scripting languages like JS a struct is represented as an -object. The details of that representation are described together -with the proto support for the language. - -The JSON representation for `Struct` is JSON object. - - - -.. csv-table:: Struct type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "fields", ":ref:`ref_google.protobuf.Struct.FieldsEntry`", "repeated", "Unordered map of dynamically typed values." - - - - - - - -.. _ref_google.protobuf.Struct.FieldsEntry: - -Struct.FieldsEntry ------------------------------------------------------------------- - - - - - -.. 
csv-table:: Struct.FieldsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_google.protobuf.Value`", "", "" - - - - - - - -.. _ref_google.protobuf.Value: - -Value ------------------------------------------------------------------- - -`Value` represents a dynamically typed value which can be either -null, a number, a string, a boolean, a recursive struct value, or a -list of values. A producer of value is expected to set one of these -variants. Absence of any variant indicates an error. - -The JSON representation for `Value` is JSON value. - - - -.. csv-table:: Value type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "null_value", ":ref:`ref_google.protobuf.NullValue`", "", "Represents a null value." - "number_value", ":ref:`ref_double`", "", "Represents a double value." - "string_value", ":ref:`ref_string`", "", "Represents a string value." - "bool_value", ":ref:`ref_bool`", "", "Represents a boolean value." - "struct_value", ":ref:`ref_google.protobuf.Struct`", "", "Represents a structured value." - "list_value", ":ref:`ref_google.protobuf.ListValue`", "", "Represents a repeated `Value`." - - - - - - -.. - end messages - - - -.. _ref_google.protobuf.NullValue: - -NullValue ------------------------------------------------------------------- - -`NullValue` is a singleton enumeration to represent the null value for the -`Value` type union. - - The JSON representation for `NullValue` is JSON `null`. - -.. csv-table:: Enum NullValue values - :header: "Name", "Number", "Description" - :widths: auto - - "NULL_VALUE", "0", "Null value." - - -.. - end enums - - -.. - end HasExtensions - - -.. 
- end services - - diff --git a/docs/api/flyteidl/docs/event/index.rst b/docs/api/flyteidl/docs/event/index.rst deleted file mode 100644 index b2c8abe50c..0000000000 --- a/docs/api/flyteidl/docs/event/index.rst +++ /dev/null @@ -1,27 +0,0 @@ - -############################################## -Flyte Internal and External Eventing interface -############################################## - -This section contains all the protocol buffer definitions for Internal and -External Eventing system. - -Flyte Internal Eventing -======================== - -This is the interface used by the dataplane (execution engine) to communicate with the control plane admin service about the workflow and task progress. - -Flyte External Eventing - Event Egress -======================================= - -This refers to the interface for all the event messages leaving the Flyte -**control plane** and reaching on the configured pubsub channel. - -`Event raw proto `__ - -.. toctree:: - :maxdepth: 1 - :caption: event - :name: eventtoc - - event diff --git a/docs/api/flyteidl/docs/plugins/index.rst b/docs/api/flyteidl/docs/plugins/index.rst deleted file mode 100644 index 90924ae451..0000000000 --- a/docs/api/flyteidl/docs/plugins/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -Flyte Task Plugins -================== - -These protocol buffer specifications provide information about the various Task -Plugins available in the Flyte system. - -`Plugins raw protos `__ - -.. toctree:: - :maxdepth: 1 - :caption: plugins - :name: pluginstoc - - plugins diff --git a/docs/api/flyteidl/docs/plugins/plugins.rst b/docs/api/flyteidl/docs/plugins/plugins.rst deleted file mode 100644 index 995dc7c084..0000000000 --- a/docs/api/flyteidl/docs/plugins/plugins.rst +++ /dev/null @@ -1,780 +0,0 @@ -###################### -Protocol Documentation -###################### - - - - -.. 
_ref_flyteidl/plugins/array_job.proto: - -flyteidl/plugins/array_job.proto -================================================================== - - - - - -.. _ref_flyteidl.plugins.ArrayJob: - -ArrayJob ------------------------------------------------------------------- - -Describes a job that can process independent pieces of data concurrently. Multiple copies of the runnable component -will be executed concurrently. - - - -.. csv-table:: ArrayJob type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "parallelism", ":ref:`ref_int64`", "", "Defines the minimum number of instances to bring up concurrently at any given point. Note that this is an optimistic restriction and that, due to network partitioning or other failures, the actual number of currently running instances might be more. This has to be a positive number if assigned. Default value is size." - "size", ":ref:`ref_int64`", "", "Defines the number of instances to launch at most. This number should match the size of the input if the job requires processing of all input data. This has to be a positive number. In the case this is not defined, the back-end will determine the size at run-time by reading the inputs." - "min_successes", ":ref:`ref_int64`", "", "An absolute number of the minimum number of successful completions of subtasks. As soon as this criteria is met, the array job will be marked as successful and outputs will be computed. This has to be a non-negative number if assigned. Default value is size (if specified)." - "min_success_ratio", ":ref:`ref_float`", "", "If the array job size is not known beforehand, the min_success_ratio can instead be used to determine when an array job can be marked successful." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/plugins/dask.proto: - -flyteidl/plugins/dask.proto -================================================================== - - - - - -.. 
_ref_flyteidl.plugins.DaskCluster: - -DaskCluster ------------------------------------------------------------------- - - - - - -.. csv-table:: DaskCluster type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "image", ":ref:`ref_string`", "", "Optional image to use for the scheduler as well as the default worker group. If unset, will use the default image." - "nWorkers", ":ref:`ref_int32`", "", "Number of workers in the default worker group" - "resources", ":ref:`ref_flyteidl.core.Resources`", "", "Resources assigned to the scheduler as well as all pods of the default worker group. As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices it is advised to only set limits. If requests are not explicitly set, the plugin will make sure to set requests==limits. The plugin sets ` --memory-limit` as well as `--nthreads` for the workers according to the limit." - - - - - - - -.. _ref_flyteidl.plugins.DaskJob: - -DaskJob ------------------------------------------------------------------- - -Custom Proto for Dask Plugin - - - -.. csv-table:: DaskJob type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "namespace", ":ref:`ref_string`", "", "Optional namespace to use for the dask pods. If none is given, the namespace of the Flyte task is used" - "jobPodSpec", ":ref:`ref_flyteidl.plugins.JobPodSpec`", "", "Spec for the job pod" - "cluster", ":ref:`ref_flyteidl.plugins.DaskCluster`", "", "Cluster" - - - - - - - -.. _ref_flyteidl.plugins.JobPodSpec: - -JobPodSpec ------------------------------------------------------------------- - -Specification for the job pod - - - -.. csv-table:: JobPodSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "image", ":ref:`ref_string`", "", "Optional image to use. If unset, will use the default image." - "resources", ":ref:`ref_flyteidl.core.Resources`", "", "Resources assigned to the job pod." - - - - - - -.. 
- end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/plugins/mpi.proto: - -flyteidl/plugins/mpi.proto -================================================================== - - - - - -.. _ref_flyteidl.plugins.DistributedMPITrainingTask: - -DistributedMPITrainingTask ------------------------------------------------------------------- - -MPI operator proposal https://github.com/kubeflow/community/blob/master/proposals/mpi-operator-proposal.md -Custom proto for plugin that enables distributed training using https://github.com/kubeflow/mpi-operator - - - -.. csv-table:: DistributedMPITrainingTask type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "num_workers", ":ref:`ref_int32`", "", "number of worker spawned in the cluster for this job" - "num_launcher_replicas", ":ref:`ref_int32`", "", "number of launcher replicas spawned in the cluster for this job The launcher pod invokes mpirun and communicates with worker pods through MPI." - "slots", ":ref:`ref_int32`", "", "number of slots per worker used in hostfile. The available slots (GPUs) in each pod." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/plugins/presto.proto: - -flyteidl/plugins/presto.proto -================================================================== - - - - - -.. _ref_flyteidl.plugins.PrestoQuery: - -PrestoQuery ------------------------------------------------------------------- - -This message works with the 'presto' task type in the SDK and is the object that will be in the 'custom' field -of a Presto task's TaskTemplate - - - -.. csv-table:: PrestoQuery type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "routing_group", ":ref:`ref_string`", "", "" - "catalog", ":ref:`ref_string`", "", "" - "schema", ":ref:`ref_string`", "", "" - "statement", ":ref:`ref_string`", "", "" - - - - - - -.. 
- end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/plugins/pytorch.proto: - -flyteidl/plugins/pytorch.proto -================================================================== - - - - - -.. _ref_flyteidl.plugins.DistributedPyTorchTrainingTask: - -DistributedPyTorchTrainingTask ------------------------------------------------------------------- - -Custom proto for plugin that enables distributed training using https://github.com/kubeflow/pytorch-operator - - - -.. csv-table:: DistributedPyTorchTrainingTask type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "workers", ":ref:`ref_int32`", "", "number of worker replicas spawned in the cluster for this job" - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/plugins/qubole.proto: - -flyteidl/plugins/qubole.proto -================================================================== - - - - - -.. _ref_flyteidl.plugins.HiveQuery: - -HiveQuery ------------------------------------------------------------------- - -Defines a query to execute on a hive cluster. - - - -.. csv-table:: HiveQuery type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "query", ":ref:`ref_string`", "", "" - "timeout_sec", ":ref:`ref_uint32`", "", "" - "retryCount", ":ref:`ref_uint32`", "", "" - - - - - - - -.. _ref_flyteidl.plugins.HiveQueryCollection: - -HiveQueryCollection ------------------------------------------------------------------- - -Defines a collection of hive queries. - - - -.. csv-table:: HiveQueryCollection type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "queries", ":ref:`ref_flyteidl.plugins.HiveQuery`", "repeated", "" - - - - - - - -.. 
_ref_flyteidl.plugins.QuboleHiveJob: - -QuboleHiveJob ------------------------------------------------------------------- - -This message works with the 'hive' task type in the SDK and is the object that will be in the 'custom' field -of a hive task's TaskTemplate - - - -.. csv-table:: QuboleHiveJob type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cluster_label", ":ref:`ref_string`", "", "" - "query_collection", ":ref:`ref_flyteidl.plugins.HiveQueryCollection`", "", "**Deprecated.** " - "tags", ":ref:`ref_string`", "repeated", "" - "query", ":ref:`ref_flyteidl.plugins.HiveQuery`", "", "" - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/plugins/ray.proto: - -flyteidl/plugins/ray.proto -================================================================== - - - - - -.. _ref_flyteidl.plugins.HeadGroupSpec: - -HeadGroupSpec ------------------------------------------------------------------- - -HeadGroupSpec are the spec for the head pod - - - -.. csv-table:: HeadGroupSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "ray_start_params", ":ref:`ref_flyteidl.plugins.HeadGroupSpec.RayStartParamsEntry`", "repeated", "Optional. RayStartParams are the params of the start command: address, object-store-memory. Refer to https://docs.ray.io/en/latest/ray-core/package-ref.html#ray-start" - - - - - - - -.. _ref_flyteidl.plugins.HeadGroupSpec.RayStartParamsEntry: - -HeadGroupSpec.RayStartParamsEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: HeadGroupSpec.RayStartParamsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. 
_ref_flyteidl.plugins.RayCluster: - -RayCluster ------------------------------------------------------------------- - -Define Ray cluster defines the desired state of RayCluster - - - -.. csv-table:: RayCluster type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "head_group_spec", ":ref:`ref_flyteidl.plugins.HeadGroupSpec`", "", "HeadGroupSpecs are the spec for the head pod" - "worker_group_spec", ":ref:`ref_flyteidl.plugins.WorkerGroupSpec`", "repeated", "WorkerGroupSpecs are the specs for the worker pods" - - - - - - - -.. _ref_flyteidl.plugins.RayJob: - -RayJob ------------------------------------------------------------------- - -RayJobSpec defines the desired state of RayJob - - - -.. csv-table:: RayJob type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "ray_cluster", ":ref:`ref_flyteidl.plugins.RayCluster`", "", "RayClusterSpec is the cluster template to run the job" - "runtime_env", ":ref:`ref_string`", "", "runtime_env is base64 encoded. Ray runtime environments: https://docs.ray.io/en/latest/ray-core/handling-dependencies.html#runtime-environments" - - - - - - - -.. _ref_flyteidl.plugins.WorkerGroupSpec: - -WorkerGroupSpec ------------------------------------------------------------------- - -WorkerGroupSpec are the specs for the worker pods - - - -.. csv-table:: WorkerGroupSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "group_name", ":ref:`ref_string`", "", "Required. RayCluster can have multiple worker groups, and it distinguishes them by name" - "replicas", ":ref:`ref_int32`", "", "Required. Desired replicas of the worker group. Defaults to 1." - "min_replicas", ":ref:`ref_int32`", "", "Optional. Min replicas of the worker group. MinReplicas defaults to 1." - "max_replicas", ":ref:`ref_int32`", "", "Optional. Max replicas of the worker group. 
MaxReplicas defaults to maxInt32" - "ray_start_params", ":ref:`ref_flyteidl.plugins.WorkerGroupSpec.RayStartParamsEntry`", "repeated", "Optional. RayStartParams are the params of the start command: address, object-store-memory. Refer to https://docs.ray.io/en/latest/ray-core/package-ref.html#ray-start" - - - - - - - -.. _ref_flyteidl.plugins.WorkerGroupSpec.RayStartParamsEntry: - -WorkerGroupSpec.RayStartParamsEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: WorkerGroupSpec.RayStartParamsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/plugins/spark.proto: - -flyteidl/plugins/spark.proto -================================================================== - - - - - -.. _ref_flyteidl.plugins.SparkApplication: - -SparkApplication ------------------------------------------------------------------- - - - - - - - - - - -.. _ref_flyteidl.plugins.SparkJob: - -SparkJob ------------------------------------------------------------------- - -Custom Proto for Spark Plugin. - - - -.. csv-table:: SparkJob type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "applicationType", ":ref:`ref_flyteidl.plugins.SparkApplication.Type`", "", "" - "mainApplicationFile", ":ref:`ref_string`", "", "" - "mainClass", ":ref:`ref_string`", "", "" - "sparkConf", ":ref:`ref_flyteidl.plugins.SparkJob.SparkConfEntry`", "repeated", "" - "hadoopConf", ":ref:`ref_flyteidl.plugins.SparkJob.HadoopConfEntry`", "repeated", "" - "executorPath", ":ref:`ref_string`", "", "Executor path for Python jobs." - "databricksConf", ":ref:`ref_string`", "", "databricksConf is base64 encoded string which stores databricks job configuration. Config structure can be found here. 
https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure The config is automatically encoded by flytekit, and decoded in the propeller." - - - - - - - -.. _ref_flyteidl.plugins.SparkJob.HadoopConfEntry: - -SparkJob.HadoopConfEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: SparkJob.HadoopConfEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.plugins.SparkJob.SparkConfEntry: - -SparkJob.SparkConfEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: SparkJob.SparkConfEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - -.. - end messages - - - -.. _ref_flyteidl.plugins.SparkApplication.Type: - -SparkApplication.Type ------------------------------------------------------------------- - - - -.. csv-table:: Enum SparkApplication.Type values - :header: "Name", "Number", "Description" - :widths: auto - - "PYTHON", "0", "" - "JAVA", "1", "" - "SCALA", "2", "" - "R", "3", "" - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/plugins/tensorflow.proto: - -flyteidl/plugins/tensorflow.proto -================================================================== - - - - - -.. _ref_flyteidl.plugins.DistributedTensorflowTrainingTask: - -DistributedTensorflowTrainingTask ------------------------------------------------------------------- - -Custom proto for plugin that enables distributed training using https://github.com/kubeflow/tf-operator - - - -.. 
csv-table:: DistributedTensorflowTrainingTask type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "workers", ":ref:`ref_int32`", "", "number of worker, ps, chief replicas spawned in the cluster for this job" - "ps_replicas", ":ref:`ref_int32`", "", "PS -> Parameter server" - "chief_replicas", ":ref:`ref_int32`", "", "" - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - - - -.. _ref_flyteidl/plugins/waitable.proto: - -flyteidl/plugins/waitable.proto -================================================================== - - - - - -.. _ref_flyteidl.plugins.Waitable: - -Waitable ------------------------------------------------------------------- - -Represents an Execution that was launched and could be waited on. - - - -.. csv-table:: Waitable type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "wf_exec_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "" - "phase", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "", "" - "workflow_id", ":ref:`ref_string`", "", "" - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - -.. - end services - - diff --git a/docs/api/flyteidl/docs/restructuredtext.tmpl b/docs/api/flyteidl/docs/restructuredtext.tmpl deleted file mode 100644 index a408a70db0..0000000000 --- a/docs/api/flyteidl/docs/restructuredtext.tmpl +++ /dev/null @@ -1,129 +0,0 @@ -###################### -Protocol Documentation -###################### - -{{range .Files}} -{{$file_name := .Name}} - -.. _ref_{{.Name}}: - -{{.Name}} -================================================================== - -{{.Description}} - -{{range .Messages}} - -.. _ref_{{.FullName}}: - -{{.LongName}} ------------------------------------------------------------------- - -{{.Description}} - -{{if .HasFields}} - -.. 
csv-table:: {{.LongName}} type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto -{{range .Fields }} - "{{.Name}}", ":ref:`ref_{{.FullType}}`", "{{.Label}}", "{{if (index .Options "deprecated"|default false)}}**Deprecated.** {{end}}{{nobr .Description}}{{if .DefaultValue}} Default: {{.DefaultValue}}{{end}}" -{{- end}} -{{end}} - - -{{if .HasExtensions}} - -.. csv-table:: {{.LongName}} type extensions - :header: "Extension", "Type", "Base", "Number", "Description" - :widths: auto -{{range .Extensions }} - "{{.Name}}", "{{.LongType}}", "{{.ContainingLongType}}", "{{.Number}}", "{{nobr .Description}}{{if .DefaultValue}} Default: {{.DefaultValue}}{{end}}" -{{- end}} -{{end}} - -{{end}} -.. - end messages - -{{range .Enums}} - -.. _ref_{{.FullName}}: - -{{.LongName}} ------------------------------------------------------------------- - -{{.Description}} - -.. csv-table:: Enum {{.LongName}} values - :header: "Name", "Number", "Description" - :widths: auto -{{range .Values }} - "{{.Name}}", "{{.Number}}", "{{nobr .Description}}" -{{- end}} - -{{end}} -.. - end enums - -{{if .HasExtensions}} - -.. _ref_{{$file_name}}_extensions: - -File-level Extensions --------------------------------------------------------------------------------- - -.. csv-table:: {{.Name}} file-level Extensions - :header: "Extension", "Type", "Base", "Number", "Description" - :widths: auto -{{range .Extensions}} - "{{.Name}}", "{{.LongType}}", "{{.ContainingLongType}}", "{{.Number}}", "{{nobr .Description}}{{if .DefaultValue}} Default: `{{.DefaultValue}}`{{end}}" -{{- end}} -{{end}} -.. - end HasExtensions - -{{range .Services}} - -.. _ref_{{.FullName}}: - -{{.Name}} ------------------------------------------------------------------- - -{{.Description}} - -.. 
csv-table:: {{.Name}} service methods - :header: "Method Name", "Request Type", "Response Type", "Description" - :widths: auto -{{range .Methods}} - "{{.Name}}", ":ref:`ref_{{.RequestFullType}}`{{if .RequestStreaming}} stream{{end}}", ":ref:`ref_{{.ResponseFullType}}`{{if .ResponseStreaming}} stream{{end}}", "{{nobr .Description}}" -{{- end}} -{{end}} -.. - end services - -{{end}} - -.. _ref_scala_types: - -Scalar Value Types -================== - -{{range .Scalars}} - -.. _ref_{{.ProtoType}}: - -{{.ProtoType}} ------------------------------ - -{{.Notes}} - -.. csv-table:: {{.ProtoType}} language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "{{.ProtoType}}", "{{.CppType}}", "{{.JavaType}}", "{{.PythonType}}", "{{.GoType}}", "{{.CSharp}}", "{{.PhpType}}", "{{.RubyType}}" - -{{end}} -.. - end scalars \ No newline at end of file diff --git a/docs/api/flyteidl/docs/service/index.rst b/docs/api/flyteidl/docs/service/index.rst deleted file mode 100644 index 6c5bebe6b2..0000000000 --- a/docs/api/flyteidl/docs/service/index.rst +++ /dev/null @@ -1,13 +0,0 @@ -REST and gRPC interface for the Flyte Admin Service -=================================================== - -This section provides all endpoint definitions that are implemented by the Admin service. - -`Admin service raw protos `__ - -.. toctree:: - :maxdepth: 1 - :caption: service - :name: servicetoc - - service diff --git a/docs/api/flyteidl/docs/service/service.rst b/docs/api/flyteidl/docs/service/service.rst deleted file mode 100644 index 3ca8ff500c..0000000000 --- a/docs/api/flyteidl/docs/service/service.rst +++ /dev/null @@ -1,543 +0,0 @@ -###################### -Protocol Documentation -###################### - - - - -.. _ref_flyteidl/service/admin.proto: - -flyteidl/service/admin.proto -================================================================== - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - - -.. 
_ref_flyteidl.service.AdminService: - -AdminService ------------------------------------------------------------------- - -The following defines an RPC service that is also served over HTTP via grpc-gateway. -Standard response codes for both are defined here: https://github.com/grpc-ecosystem/grpc-gateway/blob/master/runtime/errors.go - -.. csv-table:: AdminService service methods - :header: "Method Name", "Request Type", "Response Type", "Description" - :widths: auto - - "CreateTask", ":ref:`ref_flyteidl.admin.TaskCreateRequest`", ":ref:`ref_flyteidl.admin.TaskCreateResponse`", "Create and upload a :ref:`ref_flyteidl.admin.Task` definition" - "GetTask", ":ref:`ref_flyteidl.admin.ObjectGetRequest`", ":ref:`ref_flyteidl.admin.Task`", "Fetch a :ref:`ref_flyteidl.admin.Task` definition." - "ListTaskIds", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierListRequest`", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierList`", "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects." - "ListTasks", ":ref:`ref_flyteidl.admin.ResourceListRequest`", ":ref:`ref_flyteidl.admin.TaskList`", "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions." - "CreateWorkflow", ":ref:`ref_flyteidl.admin.WorkflowCreateRequest`", ":ref:`ref_flyteidl.admin.WorkflowCreateResponse`", "Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition" - "GetWorkflow", ":ref:`ref_flyteidl.admin.ObjectGetRequest`", ":ref:`ref_flyteidl.admin.Workflow`", "Fetch a :ref:`ref_flyteidl.admin.Workflow` definition." - "ListWorkflowIds", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierListRequest`", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierList`", "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects." - "ListWorkflows", ":ref:`ref_flyteidl.admin.ResourceListRequest`", ":ref:`ref_flyteidl.admin.WorkflowList`", "Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions." 
- "CreateLaunchPlan", ":ref:`ref_flyteidl.admin.LaunchPlanCreateRequest`", ":ref:`ref_flyteidl.admin.LaunchPlanCreateResponse`", "Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition" - "GetLaunchPlan", ":ref:`ref_flyteidl.admin.ObjectGetRequest`", ":ref:`ref_flyteidl.admin.LaunchPlan`", "Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition." - "GetActiveLaunchPlan", ":ref:`ref_flyteidl.admin.ActiveLaunchPlanRequest`", ":ref:`ref_flyteidl.admin.LaunchPlan`", "Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`." - "ListActiveLaunchPlans", ":ref:`ref_flyteidl.admin.ActiveLaunchPlanListRequest`", ":ref:`ref_flyteidl.admin.LaunchPlanList`", "List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`." - "ListLaunchPlanIds", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierListRequest`", ":ref:`ref_flyteidl.admin.NamedEntityIdentifierList`", "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects." - "ListLaunchPlans", ":ref:`ref_flyteidl.admin.ResourceListRequest`", ":ref:`ref_flyteidl.admin.LaunchPlanList`", "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions." - "UpdateLaunchPlan", ":ref:`ref_flyteidl.admin.LaunchPlanUpdateRequest`", ":ref:`ref_flyteidl.admin.LaunchPlanUpdateResponse`", "Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`." - "CreateExecution", ":ref:`ref_flyteidl.admin.ExecutionCreateRequest`", ":ref:`ref_flyteidl.admin.ExecutionCreateResponse`", "Triggers the creation of a :ref:`ref_flyteidl.admin.Execution`" - "RelaunchExecution", ":ref:`ref_flyteidl.admin.ExecutionRelaunchRequest`", ":ref:`ref_flyteidl.admin.ExecutionCreateResponse`", "Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution`" - "RecoverExecution", ":ref:`ref_flyteidl.admin.ExecutionRecoverRequest`", ":ref:`ref_flyteidl.admin.ExecutionCreateResponse`", "Recreates a previously-run workflow execution that will only start executing from the last known failure point. 
In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details." - "GetExecution", ":ref:`ref_flyteidl.admin.WorkflowExecutionGetRequest`", ":ref:`ref_flyteidl.admin.Execution`", "Fetches a :ref:`ref_flyteidl.admin.Execution`." - "UpdateExecution", ":ref:`ref_flyteidl.admin.ExecutionUpdateRequest`", ":ref:`ref_flyteidl.admin.ExecutionUpdateResponse`", "Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`." - "GetExecutionData", ":ref:`ref_flyteidl.admin.WorkflowExecutionGetDataRequest`", ":ref:`ref_flyteidl.admin.WorkflowExecutionGetDataResponse`", "Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`." - "ListExecutions", ":ref:`ref_flyteidl.admin.ResourceListRequest`", ":ref:`ref_flyteidl.admin.ExecutionList`", "Fetch a list of :ref:`ref_flyteidl.admin.Execution`." - "TerminateExecution", ":ref:`ref_flyteidl.admin.ExecutionTerminateRequest`", ":ref:`ref_flyteidl.admin.ExecutionTerminateResponse`", "Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`." - "GetNodeExecution", ":ref:`ref_flyteidl.admin.NodeExecutionGetRequest`", ":ref:`ref_flyteidl.admin.NodeExecution`", "Fetches a :ref:`ref_flyteidl.admin.NodeExecution`." - "ListNodeExecutions", ":ref:`ref_flyteidl.admin.NodeExecutionListRequest`", ":ref:`ref_flyteidl.admin.NodeExecutionList`", "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`." 
- "ListNodeExecutionsForTask", ":ref:`ref_flyteidl.admin.NodeExecutionForTaskListRequest`", ":ref:`ref_flyteidl.admin.NodeExecutionList`", "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`." - "GetNodeExecutionData", ":ref:`ref_flyteidl.admin.NodeExecutionGetDataRequest`", ":ref:`ref_flyteidl.admin.NodeExecutionGetDataResponse`", "Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`." - "RegisterProject", ":ref:`ref_flyteidl.admin.ProjectRegisterRequest`", ":ref:`ref_flyteidl.admin.ProjectRegisterResponse`", "Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment." - "UpdateProject", ":ref:`ref_flyteidl.admin.Project`", ":ref:`ref_flyteidl.admin.ProjectUpdateResponse`", "Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API." - "ListProjects", ":ref:`ref_flyteidl.admin.ProjectListRequest`", ":ref:`ref_flyteidl.admin.Projects`", "Fetches a list of :ref:`ref_flyteidl.admin.Project`" - "CreateWorkflowEvent", ":ref:`ref_flyteidl.admin.WorkflowExecutionEventRequest`", ":ref:`ref_flyteidl.admin.WorkflowExecutionEventResponse`", "Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred." - "CreateNodeEvent", ":ref:`ref_flyteidl.admin.NodeExecutionEventRequest`", ":ref:`ref_flyteidl.admin.NodeExecutionEventResponse`", "Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred." - "CreateTaskEvent", ":ref:`ref_flyteidl.admin.TaskExecutionEventRequest`", ":ref:`ref_flyteidl.admin.TaskExecutionEventResponse`", "Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred." - "GetTaskExecution", ":ref:`ref_flyteidl.admin.TaskExecutionGetRequest`", ":ref:`ref_flyteidl.admin.TaskExecution`", "Fetches a :ref:`ref_flyteidl.admin.TaskExecution`." 
- "ListTaskExecutions", ":ref:`ref_flyteidl.admin.TaskExecutionListRequest`", ":ref:`ref_flyteidl.admin.TaskExecutionList`", "Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`." - "GetTaskExecutionData", ":ref:`ref_flyteidl.admin.TaskExecutionGetDataRequest`", ":ref:`ref_flyteidl.admin.TaskExecutionGetDataResponse`", "Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`." - "UpdateProjectDomainAttributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesUpdateRequest`", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesUpdateResponse`", "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain." - "GetProjectDomainAttributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesGetRequest`", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesGetResponse`", "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain." - "DeleteProjectDomainAttributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesDeleteRequest`", ":ref:`ref_flyteidl.admin.ProjectDomainAttributesDeleteResponse`", "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain." - "UpdateProjectAttributes", ":ref:`ref_flyteidl.admin.ProjectAttributesUpdateRequest`", ":ref:`ref_flyteidl.admin.ProjectAttributesUpdateResponse`", "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level" - "GetProjectAttributes", ":ref:`ref_flyteidl.admin.ProjectAttributesGetRequest`", ":ref:`ref_flyteidl.admin.ProjectAttributesGetResponse`", "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain." - "DeleteProjectAttributes", ":ref:`ref_flyteidl.admin.ProjectAttributesDeleteRequest`", ":ref:`ref_flyteidl.admin.ProjectAttributesDeleteResponse`", "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain." 
- "UpdateWorkflowAttributes", ":ref:`ref_flyteidl.admin.WorkflowAttributesUpdateRequest`", ":ref:`ref_flyteidl.admin.WorkflowAttributesUpdateResponse`", "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow." - "GetWorkflowAttributes", ":ref:`ref_flyteidl.admin.WorkflowAttributesGetRequest`", ":ref:`ref_flyteidl.admin.WorkflowAttributesGetResponse`", "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow." - "DeleteWorkflowAttributes", ":ref:`ref_flyteidl.admin.WorkflowAttributesDeleteRequest`", ":ref:`ref_flyteidl.admin.WorkflowAttributesDeleteResponse`", "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow." - "ListMatchableAttributes", ":ref:`ref_flyteidl.admin.ListMatchableAttributesRequest`", ":ref:`ref_flyteidl.admin.ListMatchableAttributesResponse`", "Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type." - "ListNamedEntities", ":ref:`ref_flyteidl.admin.NamedEntityListRequest`", ":ref:`ref_flyteidl.admin.NamedEntityList`", "Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects." - "GetNamedEntity", ":ref:`ref_flyteidl.admin.NamedEntityGetRequest`", ":ref:`ref_flyteidl.admin.NamedEntity`", "Returns a :ref:`ref_flyteidl.admin.NamedEntity` object." - "UpdateNamedEntity", ":ref:`ref_flyteidl.admin.NamedEntityUpdateRequest`", ":ref:`ref_flyteidl.admin.NamedEntityUpdateResponse`", "Updates a :ref:`ref_flyteidl.admin.NamedEntity` object." - "GetVersion", ":ref:`ref_flyteidl.admin.GetVersionRequest`", ":ref:`ref_flyteidl.admin.GetVersionResponse`", "" - "GetDescriptionEntity", ":ref:`ref_flyteidl.admin.ObjectGetRequest`", ":ref:`ref_flyteidl.admin.DescriptionEntity`", "Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object." 
- "ListDescriptionEntities", ":ref:`ref_flyteidl.admin.DescriptionEntityListRequest`", ":ref:`ref_flyteidl.admin.DescriptionEntityList`", "Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions." - -.. - end services - - - - -.. _ref_flyteidl/service/auth.proto: - -flyteidl/service/auth.proto -================================================================== - - - - - -.. _ref_flyteidl.service.OAuth2MetadataRequest: - -OAuth2MetadataRequest ------------------------------------------------------------------- - - - - - - - - - - -.. _ref_flyteidl.service.OAuth2MetadataResponse: - -OAuth2MetadataResponse ------------------------------------------------------------------- - -OAuth2MetadataResponse defines an RFC-Compliant response for /.well-known/oauth-authorization-server metadata -as defined in https://tools.ietf.org/html/rfc8414 - - - -.. csv-table:: OAuth2MetadataResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "issuer", ":ref:`ref_string`", "", "Defines the issuer string in all JWT tokens this server issues. The issuer can be admin itself or an external issuer." - "authorization_endpoint", ":ref:`ref_string`", "", "URL of the authorization server's authorization endpoint [RFC6749]. This is REQUIRED unless no grant types are supported that use the authorization endpoint." - "token_endpoint", ":ref:`ref_string`", "", "URL of the authorization server's token endpoint [RFC6749]." - "response_types_supported", ":ref:`ref_string`", "repeated", "Array containing a list of the OAuth 2.0 response_type values that this authorization server supports." - "scopes_supported", ":ref:`ref_string`", "repeated", "JSON array containing a list of the OAuth 2.0 [RFC6749] scope values that this authorization server supports." - "token_endpoint_auth_methods_supported", ":ref:`ref_string`", "repeated", "JSON array containing a list of client authentication methods supported by this token endpoint." 
- "jwks_uri", ":ref:`ref_string`", "", "URL of the authorization server's JWK Set [JWK] document. The referenced document contains the signing key(s) the client uses to validate signatures from the authorization server." - "code_challenge_methods_supported", ":ref:`ref_string`", "repeated", "JSON array containing a list of Proof Key for Code Exchange (PKCE) [RFC7636] code challenge methods supported by this authorization server." - "grant_types_supported", ":ref:`ref_string`", "repeated", "JSON array containing a list of the OAuth 2.0 grant type values that this authorization server supports." - "device_authorization_endpoint", ":ref:`ref_string`", "", "URL of the authorization server's device authorization endpoint, as defined in Section 3.1 of [RFC8628]" - - - - - - - -.. _ref_flyteidl.service.PublicClientAuthConfigRequest: - -PublicClientAuthConfigRequest ------------------------------------------------------------------- - - - - - - - - - - -.. _ref_flyteidl.service.PublicClientAuthConfigResponse: - -PublicClientAuthConfigResponse ------------------------------------------------------------------- - -FlyteClientResponse encapsulates public information that flyte clients (CLIs... etc.) can use to authenticate users. - - - -.. csv-table:: PublicClientAuthConfigResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "client_id", ":ref:`ref_string`", "", "client_id to use when initiating OAuth2 authorization requests." - "redirect_uri", ":ref:`ref_string`", "", "redirect uri to use when initiating OAuth2 authorization requests." - "scopes", ":ref:`ref_string`", "repeated", "scopes to request when initiating OAuth2 authorization requests." - "authorization_metadata_key", ":ref:`ref_string`", "", "Authorization Header to use when passing Access Tokens to the server. If not provided, the client should use the default http `Authorization` header." 
- "service_http_endpoint", ":ref:`ref_string`", "", "ServiceHttpEndpoint points to the http endpoint for the backend. If empty, clients can assume the endpoint used to configure the gRPC connection can be used for the http one respecting the insecure flag to choose between SSL or no SSL connections." - "audience", ":ref:`ref_string`", "", "audience to use when initiating OAuth2 authorization requests." - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - - -.. _ref_flyteidl.service.AuthMetadataService: - -AuthMetadataService ------------------------------------------------------------------- - -The following defines an RPC service that is also served over HTTP via grpc-gateway. -Standard response codes for both are defined here: https://github.com/grpc-ecosystem/grpc-gateway/blob/master/runtime/errors.go -RPCs defined in this service must be anonymously accessible. - -.. csv-table:: AuthMetadataService service methods - :header: "Method Name", "Request Type", "Response Type", "Description" - :widths: auto - - "GetOAuth2Metadata", ":ref:`ref_flyteidl.service.OAuth2MetadataRequest`", ":ref:`ref_flyteidl.service.OAuth2MetadataResponse`", "Anonymously accessible. Retrieves local or external oauth authorization server metadata." - "GetPublicClientConfig", ":ref:`ref_flyteidl.service.PublicClientAuthConfigRequest`", ":ref:`ref_flyteidl.service.PublicClientAuthConfigResponse`", "Anonymously accessible. Retrieves the client information clients should use when initiating OAuth2 authorization requests." - -.. - end services - - - - -.. _ref_flyteidl/service/dataproxy.proto: - -flyteidl/service/dataproxy.proto -================================================================== - - - - - -.. _ref_flyteidl.service.CreateDownloadLinkRequest: - -CreateDownloadLinkRequest ------------------------------------------------------------------- - -CreateDownloadLinkRequest defines the request parameters to create a download link (signed url) - - - -.. 
csv-table:: CreateDownloadLinkRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "artifact_type", ":ref:`ref_flyteidl.service.ArtifactType`", "", "ArtifactType of the artifact requested." - "expires_in", ":ref:`ref_google.protobuf.Duration`", "", "ExpiresIn defines a requested expiration duration for the generated url. The request will be rejected if this exceeds the platform allowed max. +optional. The default value comes from a global config." - "node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "NodeId is the unique identifier for the node execution. For a task node, this will retrieve the output of the most recent attempt of the task." - - - - - - - -.. _ref_flyteidl.service.CreateDownloadLinkResponse: - -CreateDownloadLinkResponse ------------------------------------------------------------------- - -CreateDownloadLinkResponse defines the response for the generated links - - - -.. csv-table:: CreateDownloadLinkResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "signed_url", ":ref:`ref_string`", "repeated", "SignedUrl specifies the url to use to download content from (e.g. https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...)" - "expires_at", ":ref:`ref_google.protobuf.Timestamp`", "", "ExpiresAt defines when will the signed URL expire." - - - - - - - -.. _ref_flyteidl.service.CreateDownloadLocationRequest: - -CreateDownloadLocationRequest ------------------------------------------------------------------- - -CreateDownloadLocationRequest specified request for the CreateDownloadLocation API. - - - -.. csv-table:: CreateDownloadLocationRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "native_url", ":ref:`ref_string`", "", "NativeUrl specifies the url in the format of the configured storage provider (e.g. 
s3://my-bucket/randomstring/suffix.tar)" - "expires_in", ":ref:`ref_google.protobuf.Duration`", "", "ExpiresIn defines a requested expiration duration for the generated url. The request will be rejected if this exceeds the platform allowed max. +optional. The default value comes from a global config." - - - - - - - -.. _ref_flyteidl.service.CreateDownloadLocationResponse: - -CreateDownloadLocationResponse ------------------------------------------------------------------- - - - - - -.. csv-table:: CreateDownloadLocationResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "signed_url", ":ref:`ref_string`", "", "SignedUrl specifies the url to use to download content from (e.g. https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...)" - "expires_at", ":ref:`ref_google.protobuf.Timestamp`", "", "ExpiresAt defines when will the signed URL expires." - - - - - - - -.. _ref_flyteidl.service.CreateUploadLocationRequest: - -CreateUploadLocationRequest ------------------------------------------------------------------- - -CreateUploadLocationRequest specified request for the CreateUploadLocation API. - - - -.. csv-table:: CreateUploadLocationRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Project to create the upload location for +required" - "domain", ":ref:`ref_string`", "", "Domain to create the upload location for. +required" - "filename", ":ref:`ref_string`", "", "Filename specifies a desired suffix for the generated location. E.g. `file.py` or `pre/fix/file.zip`. +optional. By default, the service will generate a consistent name based on the provided parameters." - "expires_in", ":ref:`ref_google.protobuf.Duration`", "", "ExpiresIn defines a requested expiration duration for the generated url. The request will be rejected if this exceeds the platform allowed max. +optional. The default value comes from a global config." 
- "content_md5", ":ref:`ref_bytes`", "", "ContentMD5 restricts the upload location to the specific MD5 provided. The ContentMD5 will also appear in the generated path. +required" - - - - - - - -.. _ref_flyteidl.service.CreateUploadLocationResponse: - -CreateUploadLocationResponse ------------------------------------------------------------------- - - - - - -.. csv-table:: CreateUploadLocationResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "signed_url", ":ref:`ref_string`", "", "SignedUrl specifies the url to use to upload content to (e.g. https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...)" - "native_url", ":ref:`ref_string`", "", "NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar)" - "expires_at", ":ref:`ref_google.protobuf.Timestamp`", "", "ExpiresAt defines when will the signed URL expires." - - - - - - -.. - end messages - - - -.. _ref_flyteidl.service.ArtifactType: - -ArtifactType ------------------------------------------------------------------- - -ArtifactType - -.. csv-table:: Enum ArtifactType values - :header: "Name", "Number", "Description" - :widths: auto - - "ARTIFACT_TYPE_UNDEFINED", "0", "ARTIFACT_TYPE_UNDEFINED is the default, often invalid, value for the enum." - "ARTIFACT_TYPE_DECK", "1", "ARTIFACT_TYPE_DECK refers to the deck html file optionally generated after a task, a workflow or a launch plan finishes executing." - - -.. - end enums - - -.. - end HasExtensions - - - -.. _ref_flyteidl.service.DataProxyService: - -DataProxyService ------------------------------------------------------------------- - -DataProxyService defines an RPC Service that allows access to user-data in a controlled manner. - -.. 
csv-table:: DataProxyService service methods - :header: "Method Name", "Request Type", "Response Type", "Description" - :widths: auto - - "CreateUploadLocation", ":ref:`ref_flyteidl.service.CreateUploadLocationRequest`", ":ref:`ref_flyteidl.service.CreateUploadLocationResponse`", "CreateUploadLocation creates a signed url to upload artifacts to for a given project/domain." - "CreateDownloadLocation", ":ref:`ref_flyteidl.service.CreateDownloadLocationRequest`", ":ref:`ref_flyteidl.service.CreateDownloadLocationResponse`", "CreateDownloadLocation creates a signed url to download artifacts." - "CreateDownloadLink", ":ref:`ref_flyteidl.service.CreateDownloadLinkRequest`", ":ref:`ref_flyteidl.service.CreateDownloadLinkResponse`", "CreateDownloadLocation creates a signed url to download artifacts." - -.. - end services - - - - -.. _ref_flyteidl/service/identity.proto: - -flyteidl/service/identity.proto -================================================================== - - - - - -.. _ref_flyteidl.service.UserInfoRequest: - -UserInfoRequest ------------------------------------------------------------------- - - - - - - - - - - -.. _ref_flyteidl.service.UserInfoResponse: - -UserInfoResponse ------------------------------------------------------------------- - -See the OpenID Connect spec at https://openid.net/specs/openid-connect-core-1_0.html#UserInfoResponse for more information. - - - -.. csv-table:: UserInfoResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "subject", ":ref:`ref_string`", "", "Locally unique and never reassigned identifier within the Issuer for the End-User, which is intended to be consumed by the Client." 
- "name", ":ref:`ref_string`", "", "Full name" - "preferred_username", ":ref:`ref_string`", "", "Shorthand name by which the End-User wishes to be referred to" - "given_name", ":ref:`ref_string`", "", "Given name(s) or first name(s)" - "family_name", ":ref:`ref_string`", "", "Surname(s) or last name(s)" - "email", ":ref:`ref_string`", "", "Preferred e-mail address" - "picture", ":ref:`ref_string`", "", "Profile picture URL" - - - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - - -.. _ref_flyteidl.service.IdentityService: - -IdentityService ------------------------------------------------------------------- - -IdentityService defines an RPC Service that interacts with user/app identities. - -.. csv-table:: IdentityService service methods - :header: "Method Name", "Request Type", "Response Type", "Description" - :widths: auto - - "UserInfo", ":ref:`ref_flyteidl.service.UserInfoRequest`", ":ref:`ref_flyteidl.service.UserInfoResponse`", "Retrieves user information about the currently logged in user." - -.. - end services - - - - -.. _ref_flyteidl/service/signal.proto: - -flyteidl/service/signal.proto -================================================================== - - - - -.. - end messages - - -.. - end enums - - -.. - end HasExtensions - - - -.. _ref_flyteidl.service.SignalService: - -SignalService ------------------------------------------------------------------- - -SignalService defines an RPC Service that may create, update, and retrieve signal(s). - -.. csv-table:: SignalService service methods - :header: "Method Name", "Request Type", "Response Type", "Description" - :widths: auto - - "GetOrCreateSignal", ":ref:`ref_flyteidl.admin.SignalGetOrCreateRequest`", ":ref:`ref_flyteidl.admin.Signal`", "Fetches or creates a :ref:`ref_flyteidl.admin.Signal`." - "ListSignals", ":ref:`ref_flyteidl.admin.SignalListRequest`", ":ref:`ref_flyteidl.admin.SignalList`", "Fetch a list of :ref:`ref_flyteidl.admin.Signal` definitions." 
- "SetSignal", ":ref:`ref_flyteidl.admin.SignalSetRequest`", ":ref:`ref_flyteidl.admin.SignalSetResponse`", "Sets the value on a :ref:`ref_flyteidl.admin.Signal` definition" - -.. - end services - - diff --git a/docs/api/flyteidl/docs/withoutscalar_restructuredtext.tmpl b/docs/api/flyteidl/docs/withoutscalar_restructuredtext.tmpl deleted file mode 100644 index 9fef938d99..0000000000 --- a/docs/api/flyteidl/docs/withoutscalar_restructuredtext.tmpl +++ /dev/null @@ -1,105 +0,0 @@ -###################### -Protocol Documentation -###################### - -{{range .Files}} -{{$file_name := .Name}} - -.. _ref_{{.Name}}: - -{{.Name}} -================================================================== - -{{.Description}} - -{{range .Messages}} - -.. _ref_{{.FullName}}: - -{{.LongName}} ------------------------------------------------------------------- - -{{.Description}} - -{{if .HasFields}} - -.. csv-table:: {{.LongName}} type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto -{{range .Fields }} - "{{.Name}}", ":ref:`ref_{{.FullType}}`", "{{.Label}}", "{{if (index .Options "deprecated"|default false)}}**Deprecated.** {{end}}{{nobr .Description}}{{if .DefaultValue}} Default: {{.DefaultValue}}{{end}}" -{{- end}} -{{end}} - - -{{if .HasExtensions}} - -.. csv-table:: {{.LongName}} type extensions - :header: "Extension", "Type", "Base", "Number", "Description" - :widths: auto -{{range .Extensions }} - "{{.Name}}", "{{.LongType}}", "{{.ContainingLongType}}", "{{.Number}}", "{{nobr .Description}}{{if .DefaultValue}} Default: {{.DefaultValue}}{{end}}" -{{- end}} -{{end}} - -{{end}} -.. - end messages - -{{range .Enums}} - -.. _ref_{{.FullName}}: - -{{.LongName}} ------------------------------------------------------------------- - -{{.Description}} - -.. csv-table:: Enum {{.LongName}} values - :header: "Name", "Number", "Description" - :widths: auto -{{range .Values }} - "{{.Name}}", "{{.Number}}", "{{nobr .Description}}" -{{- end}} - -{{end}} -.. 
- end enums - -{{if .HasExtensions}} - -.. _ref_{{$file_name}}_extensions: - -File-level Extensions --------------------------------------------------------------------------------- - -.. csv-table:: {{.Name}} file-level Extensions - :header: "Extension", "Type", "Base", "Number", "Description" - :widths: auto -{{range .Extensions}} - "{{.Name}}", "{{.LongType}}", "{{.ContainingLongType}}", "{{.Number}}", "{{nobr .Description}}{{if .DefaultValue}} Default: `{{.DefaultValue}}`{{end}}" -{{- end}} -{{end}} -.. - end HasExtensions - -{{range .Services}} - -.. _ref_{{.FullName}}: - -{{.Name}} ------------------------------------------------------------------- - -{{.Description}} - -.. csv-table:: {{.Name}} service methods - :header: "Method Name", "Request Type", "Response Type", "Description" - :widths: auto -{{range .Methods}} - "{{.Name}}", ":ref:`ref_{{.RequestFullType}}`{{if .RequestStreaming}} stream{{end}}", ":ref:`ref_{{.ResponseFullType}}`{{if .ResponseStreaming}} stream{{end}}", "{{nobr .Description}}" -{{- end}} -{{end}} -.. - end services - -{{end}} diff --git a/docs/api/flyteidl/docs_index.rst b/docs/api/flyteidl/docs_index.rst deleted file mode 100644 index 27383673de..0000000000 --- a/docs/api/flyteidl/docs_index.rst +++ /dev/null @@ -1,19 +0,0 @@ -Flyte Language and API specification -==================================== - -The protocol buffers defined here provide a high level specification of various -entities in Flyte control plane and data plane. It provides detailed definition -and documentation of all these entities. - -.. 
toctree:: - :maxdepth: 1 - :caption: flyteidl - :name: flyteidltoc - - docs/admin/index - docs/core/index - docs/datacatalog/index - docs/event/index - docs/plugins/index - docs/service/index - docs/contributing diff --git a/docs/api/flyteidl/flyteidl/admin/agent.proto b/docs/api/flyteidl/flyteidl/admin/agent.proto deleted file mode 100644 index 931c27785f..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/agent.proto +++ /dev/null @@ -1,258 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/core/literals.proto"; -import "flyteidl/core/tasks.proto"; -import "flyteidl/core/workflow.proto"; -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/execution.proto"; -import "flyteidl/core/metrics.proto"; -import "flyteidl/core/security.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/struct.proto"; - -// The state of the execution is used to control its visibility in the UI/CLI. -enum State { - option deprecated = true; - RETRYABLE_FAILURE = 0; - PERMANENT_FAILURE = 1; - PENDING = 2; - RUNNING = 3; - SUCCEEDED = 4; -} - -// Represents a subset of runtime task execution metadata that are relevant to external plugins. -message TaskExecutionMetadata { - // ID of the task execution - - core.TaskExecutionIdentifier task_execution_id = 1; - // k8s namespace where the task is executed in - string namespace = 2; - // Labels attached to the task execution - map labels = 3; - // Annotations attached to the task execution - map annotations = 4; - // k8s service account associated with the task execution - string k8s_service_account = 5; - // Environment variables attached to the task execution - map environment_variables = 6; - // Represents the maximum number of attempts allowed for a task. - // If a task fails, it can be retried up to this maximum number of attempts. 
- int32 max_attempts = 7; - // Indicates whether the task execution can be interrupted. - // If set to true, the task can be stopped before completion. - bool interruptible = 8; - // Specifies the threshold for failure count at which the interruptible property - // will take effect. If the number of consecutive task failures exceeds this threshold, - // interruptible behavior will be activated. - int32 interruptible_failure_threshold = 9; - // Overrides for specific properties of the task node. - // These overrides can be used to customize the behavior of the task node. - core.TaskNodeOverrides overrides = 10; - // Identity of user running this task execution - core.Identity identity = 11; -} - -// Represents a request structure to create task. -message CreateTaskRequest { - // The inputs required to start the execution. All required inputs must be - // included in this map. If not required and not provided, defaults apply. - // +optional - core.LiteralMap inputs = 1; - // Template of the task that encapsulates all the metadata of the task. - core.TaskTemplate template = 2; - // Prefix for where task output data will be written. (e.g. s3://my-bucket/randomstring) - string output_prefix = 3; - // subset of runtime task execution metadata. - TaskExecutionMetadata task_execution_metadata = 4; -} - -// Represents a create response structure. -message CreateTaskResponse { - // ResourceMeta is created by the agent. It could be a string (jobId) or a dict (more complex metadata). - bytes resource_meta = 1; -} - -message CreateRequestHeader { - // Template of the task that encapsulates all the metadata of the task. - core.TaskTemplate template = 1; - // Prefix for where task output data will be written. (e.g. s3://my-bucket/randomstring) - string output_prefix = 2; - // subset of runtime task execution metadata. - TaskExecutionMetadata task_execution_metadata = 3; - // MaxDatasetSizeBytes is the maximum size of the dataset that can be generated by the task. 
- int64 max_dataset_size_bytes = 4; -} - - -message ExecuteTaskSyncRequest { - oneof part { - CreateRequestHeader header = 1; - core.LiteralMap inputs = 2; - } -} - -message ExecuteTaskSyncResponseHeader { - Resource resource = 1; -} - -message ExecuteTaskSyncResponse { - // Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata). - // Resource is for synchronous task execution. - oneof res { - ExecuteTaskSyncResponseHeader header = 1; - core.LiteralMap outputs = 2; - } -} - -// A message used to fetch a job resource from flyte agent server. -message GetTaskRequest { - // A predefined yet extensible Task type identifier. - string task_type = 1 [deprecated = true]; - // Metadata about the resource to be pass to the agent. - bytes resource_meta = 2; - // A predefined yet extensible Task type identifier. - TaskCategory task_category = 3; -} - -// Response to get an individual task resource. -message GetTaskResponse { - Resource resource = 1; -} - -message Resource { - // DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI. - State state = 1 [deprecated = true]; - // The outputs of the execution. It's typically used by sql task. Agent service will create a - // Structured dataset pointing to the query result table. - // +optional - core.LiteralMap outputs = 2; - // A descriptive message for the current state. e.g. waiting for cluster. - string message = 3; - // log information for the task execution. - repeated core.TaskLog log_links = 4; - // The phase of the execution is used to determine the phase of the plugin's execution. - core.TaskExecution.Phase phase = 5; - // Custom data specific to the agent. - google.protobuf.Struct custom_info = 6; -} - -// A message used to delete a task. -message DeleteTaskRequest { - // A predefined yet extensible Task type identifier. - string task_type = 1 [deprecated = true]; - // Metadata about the resource to be pass to the agent. 
- bytes resource_meta = 2; - // A predefined yet extensible Task type identifier. - TaskCategory task_category = 3; -} - -// Response to delete a task. -message DeleteTaskResponse {} - -// A message containing the agent metadata. -message Agent { - // Name is the developer-assigned name of the agent. - string name = 1; - - // SupportedTaskTypes are the types of the tasks that the agent can handle. - repeated string supported_task_types = 2 [deprecated = true]; - - // IsSync indicates whether this agent is a sync agent. Sync agents are expected to return their - // results synchronously when called by propeller. Given that sync agents can affect the performance - // of the system, it's important to enforce strict timeout policies. - // An Async agent, on the other hand, is required to be able to identify jobs by an - // identifier and query for job statuses as jobs progress. - bool is_sync = 3; - - // Supported_task_categories are the categories of the tasks that the agent can handle. - repeated TaskCategory supported_task_categories = 4; -} - -message TaskCategory { - // The name of the task type. - string name = 1; - // The version of the task type. - int32 version = 2; -} - -// A request to get an agent. -message GetAgentRequest { - // The name of the agent. - string name = 1; -} - -// A response containing an agent. -message GetAgentResponse { - Agent agent = 1; -} - -// A request to list all agents. -message ListAgentsRequest {} - -// A response containing a list of agents. -message ListAgentsResponse { - repeated Agent agents = 1; -} - -// A request to get the metrics from a task execution. -message GetTaskMetricsRequest { - // A predefined yet extensible Task type identifier. - string task_type = 1 [deprecated = true]; - // Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata). - bytes resource_meta = 2; - // The metrics to query. If empty, will return a default set of metrics. - // e.g. 
EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG - repeated string queries = 3; - // Start timestamp, inclusive. - google.protobuf.Timestamp start_time = 4; - // End timestamp, inclusive.. - google.protobuf.Timestamp end_time = 5; - // Query resolution step width in duration format or float number of seconds. - google.protobuf.Duration step = 6; - // A predefined yet extensible Task type identifier. - TaskCategory task_category = 7; -} - -// A response containing a list of metrics for a task execution. -message GetTaskMetricsResponse { - // The execution metric results. - repeated core.ExecutionMetricResult results = 1; -} - -// A request to get the log from a task execution. -message GetTaskLogsRequest { - // A predefined yet extensible Task type identifier. - string task_type = 1 [deprecated = true]; - // Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata). - bytes resource_meta = 2; - // Number of lines to return. - uint64 lines = 3; - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. If there are no more results, this value will be empty. - string token = 4; - // A predefined yet extensible Task type identifier. - TaskCategory task_category = 5; -} - -message GetTaskLogsResponseHeader { - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. If there are no more results, this value will be empty. - string token = 1; -} - -message GetTaskLogsResponseBody { - // The execution log results. - repeated string results = 1; -} - -// A response containing the logs for a task execution. 
-message GetTaskLogsResponse { - oneof part { - GetTaskLogsResponseHeader header = 1; - GetTaskLogsResponseBody body = 2; - } -} diff --git a/docs/api/flyteidl/flyteidl/admin/cluster_assignment.proto b/docs/api/flyteidl/flyteidl/admin/cluster_assignment.proto deleted file mode 100644 index 6a55798436..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/cluster_assignment.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - - -// Encapsulates specifications for routing an execution onto a specific cluster. -message ClusterAssignment { - reserved 1, 2; - string cluster_pool_name = 3; -} diff --git a/docs/api/flyteidl/flyteidl/admin/common.proto b/docs/api/flyteidl/flyteidl/admin/common.proto deleted file mode 100644 index 6c04b0531a..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/common.proto +++ /dev/null @@ -1,327 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/core/execution.proto"; -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/literals.proto"; -import "google/protobuf/timestamp.proto"; - -// Encapsulation of fields that identifies a Flyte resource. -// A Flyte resource can be a task, workflow or launch plan. -// A resource can internally have multiple versions and is uniquely identified -// by project, domain, and name. -message NamedEntityIdentifier { - // Name of the project the resource belongs to. - string project = 1; - // Name of the domain the resource belongs to. - // A domain can be considered as a subset within a specific project. - string domain = 2; - // User provided value for the resource. - // The combination of project + domain + name uniquely identifies the resource. 
- // +optional - in certain contexts - like 'List API', 'Launch plans' - string name = 3; - - // Optional, org key applied to the resource. - string org = 4; -} - -// The status of the named entity is used to control its visibility in the UI. -enum NamedEntityState { - // By default, all named entities are considered active and under development. - NAMED_ENTITY_ACTIVE = 0; - - // Archived named entities are no longer visible in the UI. - NAMED_ENTITY_ARCHIVED = 1; - - // System generated entities that aren't explicitly created or managed by a user. - SYSTEM_GENERATED = 2; -} - -// Additional metadata around a named entity. -message NamedEntityMetadata { - // Common description across all versions of the entity - // +optional - string description = 1; - - // Shared state across all version of the entity - // At this point in time, only workflow entities can have their state archived. - NamedEntityState state = 2; -} - -// Encapsulates information common to a NamedEntity, a Flyte resource such as a task, -// workflow or launch plan. A NamedEntity is exclusively identified by its resource type -// and identifier. -message NamedEntity { - // Resource type of the named entity. One of Task, Workflow or LaunchPlan. - flyteidl.core.ResourceType resource_type = 1; - NamedEntityIdentifier id = 2; - - // Additional metadata around a named entity. - NamedEntityMetadata metadata = 3; -} - -// Specifies sort ordering in a list request. -message Sort { - enum Direction { - - // By default, fields are sorted in descending order. - DESCENDING = 0; - ASCENDING = 1; - } - // Indicates an attribute to sort the response values. - // +required - string key = 1; - - // Indicates the direction to apply sort key for response values. - // +optional - Direction direction = 2; -} - -// Represents a request structure to list NamedEntityIdentifiers. -message NamedEntityIdentifierListRequest { - // Name of the project that contains the identifiers. 
- // +required - string project = 1; - - // Name of the domain the identifiers belongs to within the project. - // +required - string domain = 2; - - // Indicates the number of resources to be returned. - // +required - uint32 limit = 3; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. - // +optional - string token = 4; - - // Specifies how listed entities should be sorted in the response. - // +optional - Sort sort_by = 5; - - // Indicates a list of filters passed as string. - // +optional - string filters = 6; - - // Optional, org key applied to the resource. - string org = 7; -} - -// Represents a request structure to list NamedEntity objects -message NamedEntityListRequest { - // Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. - // +required - flyteidl.core.ResourceType resource_type = 1; - // Name of the project that contains the identifiers. - // +required - string project = 2; - // Name of the domain the identifiers belongs to within the project. - string domain = 3; - // Indicates the number of resources to be returned. - uint32 limit = 4; - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. - // +optional - string token = 5; - - // Specifies how listed entities should be sorted in the response. - // +optional - Sort sort_by = 6; - - // Indicates a list of filters passed as string. - // +optional - string filters = 7; - - // Optional, org key applied to the resource. - string org = 8; -} - -// Represents a list of NamedEntityIdentifiers. -message NamedEntityIdentifierList { - // A list of identifiers. - repeated NamedEntityIdentifier entities = 1; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. If there are no more results, this value will be empty. 
- string token = 2; -} - -// Represents a list of NamedEntityIdentifiers. -message NamedEntityList { - // A list of NamedEntity objects - repeated NamedEntity entities = 1; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. If there are no more results, this value will be empty. - string token = 2; -} - -// A request to retrieve the metadata associated with a NamedEntityIdentifier -message NamedEntityGetRequest { - // Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. - // +required - flyteidl.core.ResourceType resource_type = 1; - - // The identifier for the named entity for which to fetch metadata. - // +required - NamedEntityIdentifier id = 2; -} - -// Request to set the referenced named entity state to the configured value. -message NamedEntityUpdateRequest { - // Resource type of the metadata to update - // +required - flyteidl.core.ResourceType resource_type = 1; - - // Identifier of the metadata to update - // +required - NamedEntityIdentifier id = 2; - - // Metadata object to set as the new value - // +required - NamedEntityMetadata metadata = 3; -} - -// Purposefully empty, may be populated in the future. -message NamedEntityUpdateResponse { -} - -// Shared request structure to fetch a single resource. -// Resources include: Task, Workflow, LaunchPlan -message ObjectGetRequest { - // Indicates a unique version of resource. - // +required - core.Identifier id = 1; -} - -// Shared request structure to retrieve a list of resources. -// Resources include: Task, Workflow, LaunchPlan -message ResourceListRequest { - // id represents the unique identifier of the resource. - // +required - NamedEntityIdentifier id = 1; - - // Indicates the number of resources to be returned. - // +required - uint32 limit = 2; - - // In the case of multiple pages of results, this server-provided token can be used to fetch the next page - // in a query. 
- // +optional - string token = 3; - - // Indicates a list of filters passed as string. - // More info on constructing filters : - // +optional - string filters = 4; - - // Sort ordering. - // +optional - Sort sort_by = 5; -} - -// Defines an email notification specification. -message EmailNotification { - // The list of email addresses recipients for this notification. - // +required - repeated string recipients_email = 1; -} - -// Defines a pager duty notification specification. -message PagerDutyNotification { - // Currently, PagerDuty notifications leverage email to trigger a notification. - // +required - repeated string recipients_email = 1; -} - -// Defines a slack notification specification. -message SlackNotification { - // Currently, Slack notifications leverage email to trigger a notification. - // +required - repeated string recipients_email = 1; -} - -// Represents a structure for notifications based on execution status. -// The notification content is configured within flyte admin but can be templatized. -// Future iterations could expose configuring notifications with custom content. -message Notification { - // A list of phases to which users can associate the notifications to. - // +required - repeated core.WorkflowExecution.Phase phases = 1; - - // The type of notification to trigger. - // +required - oneof type { - EmailNotification email = 2; - PagerDutyNotification pager_duty = 3; - SlackNotification slack = 4; - } - -} - -// Represents a string url and associated metadata used throughout the platform. -message UrlBlob { - option deprecated = true; - - // Actual url value. - string url = 1; - - // Represents the size of the file accessible at the above url. - int64 bytes = 2; -} - -// Label values to be applied to an execution resource. -// In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined -// to specify how to merge labels defined at registration and execution time. 
-message Labels { - // Map of custom labels to be applied to the execution resource. - map values = 1; -} - -// Annotation values to be applied to an execution resource. -// In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined -// to specify how to merge annotations defined at registration and execution time. -message Annotations { - // Map of custom annotations to be applied to the execution resource. - map values = 1; -} - -// Environment variable values to be applied to an execution resource. -// In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined -// to specify how to merge environment variables defined at registration and execution time. -message Envs { - // Map of custom environment variables to be applied to the execution resource. - repeated flyteidl.core.KeyValuePair values = 1; -} - -// Defines permissions associated with executions created by this launch plan spec. -// Use either of these roles when they have permissions required by your workflow execution. -// Deprecated. -message AuthRole { - option deprecated = true; - - // Defines an optional iam role which will be used for tasks run in executions created with this launch plan. - string assumable_iam_role = 1; - - // Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan. - string kubernetes_service_account = 2; -} - - -// Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.). -// See https://github.com/flyteorg/flyte/issues/211 for more background information. -message RawOutputDataConfig { - // Prefix for where offloaded data from user workflows will be written - // e.g. s3://bucket/key or s3://bucket/ - string output_location_prefix = 1; -} - -// These URLs are returned as part of node and task execution data requests. 
-message FlyteURLs { - string inputs = 1; - string outputs = 2; - string deck = 3; -} diff --git a/docs/api/flyteidl/flyteidl/admin/description_entity.proto b/docs/api/flyteidl/flyteidl/admin/description_entity.proto deleted file mode 100644 index 055ca0f4b6..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/description_entity.proto +++ /dev/null @@ -1,95 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/core/identifier.proto"; -import "flyteidl/admin/common.proto"; - -// DescriptionEntity contains detailed description for the task/workflow. -// Documentation could provide insight into the algorithms, business use case, etc. -message DescriptionEntity { - // id represents the unique identifier of the description entity. - core.Identifier id = 1; - // One-liner overview of the entity. - string short_description = 2; - // Full user description with formatting preserved. - Description long_description = 3; - // Optional link to source code used to define this entity. - SourceCode source_code = 4; - // User-specified tags. These are arbitrary and can be used for searching - // filtering and discovering tasks. - repeated string tags = 5; -} - -// The format of the long description -enum DescriptionFormat { - DESCRIPTION_FORMAT_UNKNOWN = 0; - DESCRIPTION_FORMAT_MARKDOWN = 1; - DESCRIPTION_FORMAT_HTML = 2; - // python default documentation - comments is rst - DESCRIPTION_FORMAT_RST = 3; -} - -// Full user description with formatting preserved. This can be rendered -// by clients, such as the console or command line tools with in-tact -// formatting. 
-message Description { - oneof content { - // long description - no more than 4KB - string value = 1; - // if the description sizes exceed some threshold we can offload the entire - // description proto altogether to an external data store, like S3 rather than store inline in the db - string uri = 2; - } - - // Format of the long description - DescriptionFormat format = 3; - // Optional link to an icon for the entity - string icon_link = 4; -} - -// Link to source code used to define this entity -message SourceCode { - string link = 1; -} - -// Represents a list of DescriptionEntities returned from the admin. -// See :ref:`ref_flyteidl.admin.DescriptionEntity` for more details -message DescriptionEntityList { - // A list of DescriptionEntities returned based on the request. - repeated DescriptionEntity descriptionEntities = 1; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. If there are no more results, this value will be empty. - string token = 2; -} - -// Represents a request structure to retrieve a list of DescriptionEntities. -// See :ref:`ref_flyteidl.admin.DescriptionEntity` for more details -message DescriptionEntityListRequest { - // Identifies the specific type of resource that this identifier corresponds to. - flyteidl.core.ResourceType resource_type = 1; - - // The identifier for the description entity. - // +required - NamedEntityIdentifier id = 2; - - // Indicates the number of resources to be returned. - // +required - uint32 limit = 3; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. - // +optional - string token = 4; - - // Indicates a list of filters passed as string. - // More info on constructing filters : - // +optional - string filters = 5; - - // Sort ordering for returned list. 
- // +optional - Sort sort_by = 6; -} diff --git a/docs/api/flyteidl/flyteidl/admin/event.proto b/docs/api/flyteidl/flyteidl/admin/event.proto deleted file mode 100644 index c1eea1e045..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/event.proto +++ /dev/null @@ -1,70 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/event/event.proto"; - -// Indicates that a sent event was not used to update execution state due to -// the referenced execution already being terminated (and therefore ineligible -// for further state transitions). -message EventErrorAlreadyInTerminalState { - // +required - string current_phase = 1; -} - -// Indicates an event was rejected because it came from a different cluster than -// is on record as running the execution. -message EventErrorIncompatibleCluster { - // The cluster which has been recorded as processing the execution. - // +required - string cluster = 1; -} - -// Indicates why a sent event was not used to update execution. -message EventFailureReason { - // +required - oneof reason { - EventErrorAlreadyInTerminalState already_in_terminal_state = 1; - EventErrorIncompatibleCluster incompatible_cluster = 2; - } -} - -// Request to send a notification that a workflow execution event has occurred. -message WorkflowExecutionEventRequest { - // Unique ID for this request that can be traced between services - string request_id = 1; - - // Details about the event that occurred. - event.WorkflowExecutionEvent event = 2; -} - -message WorkflowExecutionEventResponse { - // Purposefully empty, may be populated in the future. -} - -// Request to send a notification that a node execution event has occurred. -message NodeExecutionEventRequest { - // Unique ID for this request that can be traced between services - string request_id = 1; - - // Details about the event that occurred. 
- event.NodeExecutionEvent event = 2; -} - -message NodeExecutionEventResponse { - // Purposefully empty, may be populated in the future. -} - -// Request to send a notification that a task execution event has occurred. -message TaskExecutionEventRequest { - // Unique ID for this request that can be traced between services - string request_id = 1; - - // Details about the event that occurred. - event.TaskExecutionEvent event = 2; -} - -message TaskExecutionEventResponse { - // Purposefully empty, may be populated in the future. -} diff --git a/docs/api/flyteidl/flyteidl/admin/execution.proto b/docs/api/flyteidl/flyteidl/admin/execution.proto deleted file mode 100644 index 6197576bd9..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/execution.proto +++ /dev/null @@ -1,428 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/admin/cluster_assignment.proto"; -import "flyteidl/admin/common.proto"; -import "flyteidl/core/literals.proto"; -import "flyteidl/core/execution.proto"; -import "flyteidl/core/execution_envs.proto"; -import "flyteidl/core/artifact_id.proto"; -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/metrics.proto"; -import "flyteidl/core/security.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; -import "flyteidl/admin/matchable_resource.proto"; - -// Request to launch an execution with the given project, domain and optionally-assigned name. -message ExecutionCreateRequest { - // Name of the project the execution belongs to. - // +required - string project = 1; - - // Name of the domain the execution belongs to. - // A domain can be considered as a subset within a specific project. - // +required - string domain = 2; - - // User provided value for the resource. - // If none is provided the system will generate a unique string. 
- // +optional - string name = 3; - - // Additional fields necessary to launch the execution. - // +optional - ExecutionSpec spec = 4; - - // The inputs required to start the execution. All required inputs must be - // included in this map. If not required and not provided, defaults apply. - // +optional - core.LiteralMap inputs = 5; - - // Optional, org key applied to the resource. - string org = 6; -} - -// Request to relaunch the referenced execution. -message ExecutionRelaunchRequest { - // Identifier of the workflow execution to relaunch. - // +required - core.WorkflowExecutionIdentifier id = 1; - - // Deprecated field, do not use. - reserved 2; - - // User provided value for the relaunched execution. - // If none is provided the system will generate a unique string. - // +optional - string name = 3; - - // Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. - // If enabled, all calculations are performed even if cached results would be available, overwriting the stored - // data once execution finishes successfully. - bool overwrite_cache = 4; -} - -// Request to recover the referenced execution. -message ExecutionRecoverRequest { - // Identifier of the workflow execution to recover. - core.WorkflowExecutionIdentifier id = 1; - - // User provided value for the recovered execution. - // If none is provided the system will generate a unique string. - // +optional - string name = 2; - - // Additional metadata which will be used to overwrite any metadata in the reference execution when triggering a recovery execution. - ExecutionMetadata metadata = 3; -} - -// The unique identifier for a successfully created execution. -// If the name was *not* specified in the create request, this identifier will include a generated name. -message ExecutionCreateResponse { - core.WorkflowExecutionIdentifier id = 1; -} - -// A message used to fetch a single workflow execution entity. 
-// See :ref:`ref_flyteidl.admin.Execution` for more details -message WorkflowExecutionGetRequest { - // Uniquely identifies an individual workflow execution. - core.WorkflowExecutionIdentifier id = 1; -} - -// A workflow execution represents an instantiated workflow, including all inputs and additional -// metadata as well as computed results included state, outputs, and duration-based attributes. -// Used as a response object used in Get and List execution requests. -message Execution { - // Unique identifier of the workflow execution. - core.WorkflowExecutionIdentifier id = 1; - - // User-provided configuration and inputs for launching the execution. - ExecutionSpec spec = 2; - - // Execution results. - ExecutionClosure closure = 3; -} - -// Used as a response for request to list executions. -// See :ref:`ref_flyteidl.admin.Execution` for more details -message ExecutionList { - repeated Execution executions = 1; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. If there are no more results, this value will be empty. - string token = 2; -} - -// Input/output data can represented by actual values or a link to where values are stored -message LiteralMapBlob { - oneof data { - // Data in LiteralMap format - core.LiteralMap values = 1 [deprecated = true]; - - // In the event that the map is too large, we return a uri to the data - string uri = 2; - } -} - -// Specifies metadata around an aborted workflow execution. -message AbortMetadata { - // In the case of a user-specified abort, this will pass along the user-supplied cause. - string cause = 1; - - // Identifies the entity (if any) responsible for terminating the execution - string principal = 2; -} - -// Encapsulates the results of the Execution -message ExecutionClosure { - // A result produced by a terminated execution. - // A pending (non-terminal) execution will not have any output result. 
- oneof output_result { - // Output URI in the case of a successful execution. - // DEPRECATED. Use GetExecutionData to fetch output data instead. - LiteralMapBlob outputs = 1 [deprecated = true]; - - // Error information in the case of a failed execution. - core.ExecutionError error = 2; - - // In the case of a user-specified abort, this will pass along the user-supplied cause. - string abort_cause = 10 [deprecated = true]; - - // In the case of a user-specified abort, this will pass along the user and their supplied cause. - AbortMetadata abort_metadata = 12; - - // Raw output data produced by this execution. - // DEPRECATED. Use GetExecutionData to fetch output data instead. - core.LiteralMap output_data = 13 [deprecated = true]; - } - - // Inputs computed and passed for execution. - // computed_inputs depends on inputs in ExecutionSpec, fixed and default inputs in launch plan - core.LiteralMap computed_inputs = 3 [deprecated = true]; - - // Most recent recorded phase for the execution. - core.WorkflowExecution.Phase phase = 4; - - // Reported time at which the execution began running. - google.protobuf.Timestamp started_at = 5; - - // The amount of time the execution spent running. - google.protobuf.Duration duration = 6; - - // Reported time at which the execution was created. - google.protobuf.Timestamp created_at = 7; - - // Reported time at which the execution was last updated. - google.protobuf.Timestamp updated_at = 8; - - // The notification settings to use after merging the CreateExecutionRequest and the launch plan - // notification settings. An execution launched with notifications will always prefer that definition - // to notifications defined statically in a launch plan. - repeated Notification notifications = 9; - - // Identifies the workflow definition for this execution. 
- core.Identifier workflow_id = 11; - - // Provides the details of the last stage change - ExecutionStateChangeDetails state_change_details = 14; -} - -// Represents system, rather than user-facing, metadata about an execution. -message SystemMetadata { - - // Which execution cluster this execution ran on. - string execution_cluster = 1; - - // Which kubernetes namespace the execution ran under. - string namespace = 2; -} - -// Represents attributes about an execution which are not required to launch the execution but are useful to record. -// These attributes are assigned at launch time and do not change. -message ExecutionMetadata { - // The method by which this execution was launched. - enum ExecutionMode { - // The default execution mode, MANUAL implies that an execution was launched by an individual. - MANUAL = 0; - - // A schedule triggered this execution launch. - SCHEDULED = 1; - - // A system process was responsible for launching this execution rather an individual. - SYSTEM = 2; - - // This execution was launched with identical inputs as a previous execution. - RELAUNCH = 3; - - // This execution was triggered by another execution. - CHILD_WORKFLOW = 4; - - // This execution was recovered from another execution. - RECOVERED = 5; - - // Execution was kicked off by the artifact trigger system - TRIGGER = 6; - } - ExecutionMode mode = 1; - - // Identifier of the entity that triggered this execution. - // For systems using back-end authentication any value set here will be discarded in favor of the - // authenticated user context. - string principal = 2; - - // Indicates the nestedness of this execution. - // If a user launches a workflow execution, the default nesting is 0. - // If this execution further launches a workflow (child workflow), the nesting level is incremented by 0 => 1 - // Generally, if workflow at nesting level k launches a workflow then the child workflow will have - // nesting = k + 1. 
- uint32 nesting = 3; - - // For scheduled executions, the requested time for execution for this specific schedule invocation. - google.protobuf.Timestamp scheduled_at = 4; - - // Which subworkflow node (if any) launched this execution - core.NodeExecutionIdentifier parent_node_execution = 5; - - // Optional, a reference workflow execution related to this execution. - // In the case of a relaunch, this references the original workflow execution. - core.WorkflowExecutionIdentifier reference_execution = 16; - - // Optional, platform-specific metadata about the execution. - // In this the future this may be gated behind an ACL or some sort of authorization. - SystemMetadata system_metadata = 17; - - // Save a list of the artifacts used in this execution for now. This is a list only rather than a mapping - // since we don't have a structure to handle nested ones anyways. - repeated core.ArtifactID artifact_ids = 18; -} - -message NotificationList { - repeated Notification notifications = 1; -} - -// An ExecutionSpec encompasses all data used to launch this execution. The Spec does not change over the lifetime -// of an execution as it progresses across phase changes. -message ExecutionSpec { - // Launch plan to be executed - core.Identifier launch_plan = 1; - - // Input values to be passed for the execution - core.LiteralMap inputs = 2 [deprecated = true]; - - // Metadata for the execution - ExecutionMetadata metadata = 3; - - // This field is deprecated. Do not use. - reserved 4; - - oneof notification_overrides { - // List of notifications based on Execution status transitions - // When this list is not empty it is used rather than any notifications defined in the referenced launch plan. - // When this list is empty, the notifications defined for the launch plan will be applied. - NotificationList notifications = 5; - - // This should be set to true if all notifications are intended to be disabled for this execution. 
- bool disable_all = 6; - } - - // Labels to apply to the execution resource. - Labels labels = 7; - - // Annotations to apply to the execution resource. - Annotations annotations = 8; - - // Optional: security context override to apply this execution. - core.SecurityContext security_context = 10; - - // Optional: auth override to apply this execution. - AuthRole auth_role = 16 [deprecated = true]; - - // Indicates the runtime priority of the execution. - core.QualityOfService quality_of_service = 17; - - // Controls the maximum number of task nodes that can be run in parallel for the entire workflow. - // This is useful to achieve fairness. Note: MapTasks are regarded as one unit, - // and parallelism/concurrency of MapTasks is independent from this. - int32 max_parallelism = 18; - - // User setting to configure where to store offloaded data (i.e. Blobs, structured datasets, query data, etc.). - // This should be a prefix like s3://my-bucket/my-data - RawOutputDataConfig raw_output_data_config = 19; - - // Controls how to select an available cluster on which this execution should run. - ClusterAssignment cluster_assignment = 20; - - // Allows for the interruptible flag of a workflow to be overwritten for a single execution. - // Omitting this field uses the workflow's value as a default. - // As we need to distinguish between the field not being provided and its default value false, we have to use a wrapper - // around the bool field. - google.protobuf.BoolValue interruptible = 21; - - // Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. - // If enabled, all calculations are performed even if cached results would be available, overwriting the stored - // data once execution finishes successfully. - bool overwrite_cache = 22; - - // Environment variables to be set for the execution. - Envs envs = 23; - - // Tags to be set for the execution. 
- repeated string tags = 24 [deprecated = true]; - - // Execution cluster label to be set for the execution. - ExecutionClusterLabel execution_cluster_label = 25; - - // Execution environment assignments to be set for the execution. - repeated core.ExecutionEnvAssignment execution_env_assignments = 26; -} - -// Request to terminate an in-progress execution. This action is irreversible. -// If an execution is already terminated, this request will simply be a no-op. -// This request will fail if it references a non-existent execution. -// If the request succeeds the phase "ABORTED" will be recorded for the termination -// with the optional cause added to the output_result. -message ExecutionTerminateRequest { - // Uniquely identifies the individual workflow execution to be terminated. - core.WorkflowExecutionIdentifier id = 1; - - // Optional reason for aborting. - string cause = 2; -} - -message ExecutionTerminateResponse { - // Purposefully empty, may be populated in the future. -} - -// Request structure to fetch inputs, output and other data produced by an execution. -// By default this data is not returned inline in :ref:`ref_flyteidl.admin.WorkflowExecutionGetRequest` -message WorkflowExecutionGetDataRequest { - // The identifier of the execution for which to fetch inputs and outputs. - core.WorkflowExecutionIdentifier id = 1; -} - -// Response structure for WorkflowExecutionGetDataRequest which contains inputs and outputs for an execution. -message WorkflowExecutionGetDataResponse { - // Signed url to fetch a core.LiteralMap of execution outputs. - // Deprecated: Please use full_outputs instead. - UrlBlob outputs = 1 [deprecated = true]; - - // Signed url to fetch a core.LiteralMap of execution inputs. - // Deprecated: Please use full_inputs instead. - UrlBlob inputs = 2 [deprecated = true]; - - // Full_inputs will only be populated if they are under a configured size threshold. 
- core.LiteralMap full_inputs = 3; - - // Full_outputs will only be populated if they are under a configured size threshold. - core.LiteralMap full_outputs = 4; -} - -// The state of the execution is used to control its visibility in the UI/CLI. -enum ExecutionState { - // By default, all executions are considered active. - EXECUTION_ACTIVE = 0; - - // Archived executions are no longer visible in the UI. - EXECUTION_ARCHIVED = 1; -} - -message ExecutionUpdateRequest { - // Identifier of the execution to update - core.WorkflowExecutionIdentifier id = 1; - - // State to set as the new value active/archive - ExecutionState state = 2; -} - -message ExecutionStateChangeDetails { - // The state of the execution is used to control its visibility in the UI/CLI. - ExecutionState state = 1; - - // This timestamp represents when the state changed. - google.protobuf.Timestamp occurred_at = 2; - - // Identifies the entity (if any) responsible for causing the state change of the execution - string principal = 3; -} - -message ExecutionUpdateResponse {} - -// WorkflowExecutionGetMetricsRequest represents a request to retrieve metrics for the specified workflow execution. -message WorkflowExecutionGetMetricsRequest { - // id defines the workflow execution to query for. - core.WorkflowExecutionIdentifier id = 1; - - // depth defines the number of Flyte entity levels to traverse when breaking down execution details. - int32 depth = 2; -} - -// WorkflowExecutionGetMetricsResponse represents the response containing metrics for the specified workflow execution. -message WorkflowExecutionGetMetricsResponse { - // Span defines the top-level breakdown of the workflows execution. More precise information is nested in a - // hierarchical structure using Flyte entity references. 
- core.Span span = 1; -} diff --git a/docs/api/flyteidl/flyteidl/admin/launch_plan.proto b/docs/api/flyteidl/flyteidl/admin/launch_plan.proto deleted file mode 100644 index 4be8dedb91..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/launch_plan.proto +++ /dev/null @@ -1,226 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/core/execution.proto"; -import "flyteidl/core/execution_envs.proto"; -import "flyteidl/core/literals.proto"; -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/interface.proto"; -import "flyteidl/core/security.proto"; -import "flyteidl/admin/schedule.proto"; -import "flyteidl/admin/common.proto"; -import "google/protobuf/any.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; - - -// Request to register a launch plan. The included LaunchPlanSpec may have a complete or incomplete set of inputs required -// to launch a workflow execution. By default all launch plans are registered in state INACTIVE. If you wish to -// set the state to ACTIVE, you must submit a LaunchPlanUpdateRequest, after you have successfully created a launch plan. -message LaunchPlanCreateRequest { - // Uniquely identifies a launch plan entity. - core.Identifier id = 1; - - // User-provided launch plan details, including reference workflow, inputs and other metadata. - LaunchPlanSpec spec = 2; -} - -message LaunchPlanCreateResponse { - // Purposefully empty, may be populated in the future. -} - -// By default any launch plan regardless of state can be used to launch a workflow execution. -// However, at most one version of a launch plan -// (e.g. a NamedEntityIdentifier set of shared project, domain and name values) can be -// active at a time in regards to *schedules*. That is, at most one schedule in a NamedEntityIdentifier -// group will be observed and trigger executions at a defined cadence. 
-enum LaunchPlanState { - INACTIVE = 0; - ACTIVE = 1; -} - -// A LaunchPlan provides the capability to templatize workflow executions. -// Launch plans simplify associating one or more schedules, inputs and notifications with your workflows. -// Launch plans can be shared and used to trigger executions with predefined inputs even when a workflow -// definition doesn't necessarily have a default value for said input. -message LaunchPlan { - // Uniquely identifies a launch plan entity. - core.Identifier id = 1; - - // User-provided launch plan details, including reference workflow, inputs and other metadata. - LaunchPlanSpec spec = 2; - - // Values computed by the flyte platform after launch plan registration. - LaunchPlanClosure closure = 3; -} - -// Response object for list launch plan requests. -// See :ref:`ref_flyteidl.admin.LaunchPlan` for more details -message LaunchPlanList { - repeated LaunchPlan launch_plans = 1; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. If there are no more results, this value will be empty. - string token = 2; -} - -// Defines permissions associated with executions created by this launch plan spec. -// Use either of these roles when they have permissions required by your workflow execution. -// Deprecated. -message Auth { - option deprecated = true; - - // Defines an optional iam role which will be used for tasks run in executions created with this launch plan. - string assumable_iam_role = 1; - - // Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan. - string kubernetes_service_account = 2; -} - -// User-provided launch plan definition and configuration values. 
-message LaunchPlanSpec { - // Reference to the Workflow template that the launch plan references - core.Identifier workflow_id = 1; - - // Metadata for the Launch Plan - LaunchPlanMetadata entity_metadata = 2; - - // Input values to be passed for the execution. - // These can be overridden when an execution is created with this launch plan. - core.ParameterMap default_inputs = 3; - - // Fixed, non-overridable inputs for the Launch Plan. - // These can not be overridden when an execution is created with this launch plan. - core.LiteralMap fixed_inputs = 4; - - // String to indicate the role to use to execute the workflow underneath - string role = 5 [deprecated = true]; - - // Custom labels to be applied to the execution resource. - Labels labels = 6; - - // Custom annotations to be applied to the execution resource. - Annotations annotations = 7; - - // Indicates the permission associated with workflow executions triggered with this launch plan. - Auth auth = 8 [deprecated = true]; - - AuthRole auth_role = 9 [deprecated = true]; - - // Indicates security context for permissions triggered with this launch plan - core.SecurityContext security_context = 10; - - // Indicates the runtime priority of the execution. - core.QualityOfService quality_of_service = 16; - - // Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.). - RawOutputDataConfig raw_output_data_config = 17; - - // Controls the maximum number of tasknodes that can be run in parallel for the entire workflow. - // This is useful to achieve fairness. Note: MapTasks are regarded as one unit, - // and parallelism/concurrency of MapTasks is independent from this. - int32 max_parallelism = 18; - - // Allows for the interruptible flag of a workflow to be overwritten for a single execution. - // Omitting this field uses the workflow's value as a default. 
- // As we need to distinguish between the field not being provided and its default value false, we have to use a wrapper - // around the bool field. - google.protobuf.BoolValue interruptible = 19; - - // Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. - // If enabled, all calculations are performed even if cached results would be available, overwriting the stored - // data once execution finishes successfully. - bool overwrite_cache = 20; - - // Environment variables to be set for the execution. - Envs envs = 21; - - // Execution environment assignments to be set for the execution. - repeated core.ExecutionEnvAssignment execution_env_assignments = 22; -} - -// Values computed by the flyte platform after launch plan registration. -// These include expected_inputs required to be present in a CreateExecutionRequest -// to launch the reference workflow as well timestamp values associated with the launch plan. -message LaunchPlanClosure { - // Indicate the Launch plan state. - LaunchPlanState state = 1; - - // Indicates the set of inputs expected when creating an execution with the Launch plan - core.ParameterMap expected_inputs = 2; - - // Indicates the set of outputs expected to be produced by creating an execution with the Launch plan - core.VariableMap expected_outputs = 3; - - // Time at which the launch plan was created. - google.protobuf.Timestamp created_at = 4; - - // Time at which the launch plan was last updated. - google.protobuf.Timestamp updated_at = 5; -} - -// Additional launch plan attributes included in the LaunchPlanSpec not strictly required to launch -// the reference workflow. 
-message LaunchPlanMetadata { - // Schedule to execute the Launch Plan - Schedule schedule = 1; - - // List of notifications based on Execution status transitions - repeated Notification notifications = 2; - - // Additional metadata for how to launch the launch plan - google.protobuf.Any launch_conditions = 3; -} - -// Request to set the referenced launch plan state to the configured value. -// See :ref:`ref_flyteidl.admin.LaunchPlan` for more details -message LaunchPlanUpdateRequest { - // Identifier of launch plan for which to change state. - // +required. - core.Identifier id = 1; - - // Desired state to apply to the launch plan. - // +required. - LaunchPlanState state = 2; -} - -// Purposefully empty, may be populated in the future. -message LaunchPlanUpdateResponse { -} - -// Represents a request struct for finding an active launch plan for a given NamedEntityIdentifier -// See :ref:`ref_flyteidl.admin.LaunchPlan` for more details -message ActiveLaunchPlanRequest { - // +required. - NamedEntityIdentifier id = 1; -} - -// Represents a request structure to list active launch plans within a project/domain and optional org. -// See :ref:`ref_flyteidl.admin.LaunchPlan` for more details -message ActiveLaunchPlanListRequest { - // Name of the project that contains the identifiers. - // +required. - string project = 1; - - // Name of the domain the identifiers belongs to within the project. - // +required. - string domain = 2; - - // Indicates the number of resources to be returned. - // +required. - uint32 limit = 3; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. - // +optional - string token = 4; - - // Sort ordering. - // +optional - Sort sort_by = 5; - - // Optional, org key applied to the resource. 
- string org = 6; -} diff --git a/docs/api/flyteidl/flyteidl/admin/matchable_resource.proto b/docs/api/flyteidl/flyteidl/admin/matchable_resource.proto deleted file mode 100644 index 812d75fe4b..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/matchable_resource.proto +++ /dev/null @@ -1,194 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/admin/common.proto"; -import "flyteidl/admin/cluster_assignment.proto"; -import "flyteidl/core/execution.proto"; -import "flyteidl/core/execution_envs.proto"; -import "flyteidl/core/security.proto"; -import "google/protobuf/wrappers.proto"; - -// Defines a resource that can be configured by customizable Project-, ProjectDomain- or WorkflowAttributes -// based on matching tags. -enum MatchableResource { - // Applies to customizable task resource requests and limits. - TASK_RESOURCE = 0; - - // Applies to configuring templated kubernetes cluster resources. - CLUSTER_RESOURCE = 1; - - // Configures task and dynamic task execution queue assignment. - EXECUTION_QUEUE = 2; - - // Configures the K8s cluster label to be used for execution to be run - EXECUTION_CLUSTER_LABEL = 3; - - // Configures default quality of service when undefined in an execution spec. - QUALITY_OF_SERVICE_SPECIFICATION = 4; - - // Selects configurable plugin implementation behavior for a given task type. - PLUGIN_OVERRIDE = 5; - - // Adds defaults for customizable workflow-execution specifications and overrides. - WORKFLOW_EXECUTION_CONFIG = 6; - - // Controls how to select an available cluster on which this execution should run. - CLUSTER_ASSIGNMENT = 7; -} - -// Defines a set of overridable task resource attributes set during task registration. 
-message TaskResourceSpec { - string cpu = 1; - - string gpu = 2; - - string memory = 3; - - string storage = 4; - - string ephemeral_storage = 5; -} - -// Defines task resource defaults and limits that will be applied at task registration. -message TaskResourceAttributes { - TaskResourceSpec defaults = 1; - - TaskResourceSpec limits = 2; -} - -message ClusterResourceAttributes { - // Custom resource attributes which will be applied in cluster resource creation (e.g. quotas). - // Map keys are the *case-sensitive* names of variables in templatized resource files. - // Map values should be the custom values which get substituted during resource creation. - map attributes = 1; -} - -message ExecutionQueueAttributes { - // Tags used for assigning execution queues for tasks defined within this project. - repeated string tags = 1; -} - -message ExecutionClusterLabel { - // Label value to determine where the execution will be run - string value = 1; -} - -// This MatchableAttribute configures selecting alternate plugin implementations for a given task type. -// In addition to an override implementation a selection of fallbacks can be provided or other modes -// for handling cases where the desired plugin override is not enabled in a given Flyte deployment. -message PluginOverride { - // A predefined yet extensible Task type identifier. - string task_type = 1; - - // A set of plugin ids which should handle tasks of this type instead of the default registered plugin. The list will be tried in order until a plugin is found with that id. - repeated string plugin_id = 2; - - enum MissingPluginBehavior { - // By default, if this plugin is not enabled for a Flyte deployment then execution will fail. - FAIL = 0; - - // Uses the system-configured default implementation. - USE_DEFAULT = 1; - } - - // Defines the behavior when no plugin from the plugin_id list is not found. 
- MissingPluginBehavior missing_plugin_behavior = 4; -} - - -message PluginOverrides { - repeated PluginOverride overrides = 1; -} - -// Adds defaults for customizable workflow-execution specifications and overrides. -message WorkflowExecutionConfig { - // Can be used to control the number of parallel nodes to run within the workflow. This is useful to achieve fairness. - int32 max_parallelism = 1; - - // Indicates security context permissions for executions triggered with this matchable attribute. - core.SecurityContext security_context = 2; - - // Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.). - RawOutputDataConfig raw_output_data_config = 3; - - // Custom labels to be applied to a triggered execution resource. - Labels labels = 4; - - // Custom annotations to be applied to a triggered execution resource. - Annotations annotations = 5; - - // Allows for the interruptible flag of a workflow to be overwritten for a single execution. - // Omitting this field uses the workflow's value as a default. - // As we need to distinguish between the field not being provided and its default value false, we have to use a wrapper - // around the bool field. - google.protobuf.BoolValue interruptible = 6; - - // Allows for all cached values of a workflow and its tasks to be overwritten for a single execution. - // If enabled, all calculations are performed even if cached results would be available, overwriting the stored - // data once execution finishes successfully. - bool overwrite_cache = 7; - - // Environment variables to be set for the execution. - Envs envs = 8; - - // Execution environment assignments to be set for the execution. - repeated core.ExecutionEnvAssignment execution_env_assignments = 9; -} - -// Generic container for encapsulating all types of the above attributes messages. 
-message MatchingAttributes { - oneof target { - TaskResourceAttributes task_resource_attributes = 1; - - ClusterResourceAttributes cluster_resource_attributes = 2; - - ExecutionQueueAttributes execution_queue_attributes = 3; - - ExecutionClusterLabel execution_cluster_label = 4; - - core.QualityOfService quality_of_service = 5; - - PluginOverrides plugin_overrides = 6; - - WorkflowExecutionConfig workflow_execution_config = 7; - - ClusterAssignment cluster_assignment = 8; - } -} - -// Represents a custom set of attributes applied for either a domain (and optional org); a domain and project (and optional org); -// or domain, project and workflow name (and optional org). -// These are used to override system level defaults for kubernetes cluster resource management, -// default execution values, and more all across different levels of specificity. -message MatchableAttributesConfiguration { - MatchingAttributes attributes = 1; - - string domain = 2; - - string project = 3; - - string workflow = 4; - - string launch_plan = 5; - - // Optional, org key applied to the resource. - string org = 6; -} - -// Request all matching resource attributes for a resource type. -// See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details -message ListMatchableAttributesRequest { - // +required - MatchableResource resource_type = 1; - - // Optional, org filter applied to list project requests. - string org = 2; -} - -// Response for a request for all matching resource attributes for a resource type. 
-// See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details -message ListMatchableAttributesResponse { - repeated MatchableAttributesConfiguration configurations = 1; -} diff --git a/docs/api/flyteidl/flyteidl/admin/node_execution.proto b/docs/api/flyteidl/flyteidl/admin/node_execution.proto deleted file mode 100644 index 411201ea45..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/node_execution.proto +++ /dev/null @@ -1,245 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/admin/common.proto"; -import "flyteidl/core/execution.proto"; -import "flyteidl/core/catalog.proto"; -import "flyteidl/core/compiler.proto"; -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/literals.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/duration.proto"; - -// A message used to fetch a single node execution entity. -// See :ref:`ref_flyteidl.admin.NodeExecution` for more details -message NodeExecutionGetRequest { - - // Uniquely identifies an individual node execution. - // +required - core.NodeExecutionIdentifier id = 1; -} - -// Represents a request structure to retrieve a list of node execution entities. -// See :ref:`ref_flyteidl.admin.NodeExecution` for more details -message NodeExecutionListRequest { - // Indicates the workflow execution to filter by. - // +required - core.WorkflowExecutionIdentifier workflow_execution_id = 1; - - // Indicates the number of resources to be returned. - // +required - uint32 limit = 2; - - // In the case of multiple pages of results, the, server-provided token can be used to fetch the next page - // in a query. - // +optional - - string token = 3; - // Indicates a list of filters passed as string. - // More info on constructing filters : - // +optional - string filters = 4; - - // Sort ordering. 
- // +optional - Sort sort_by = 5; - - // Unique identifier of the parent node in the execution - // +optional - string unique_parent_id = 6; -} - -// Represents a request structure to retrieve a list of node execution entities launched by a specific task. -// This can arise when a task yields a subworkflow. -message NodeExecutionForTaskListRequest { - // Indicates the node execution to filter by. - // +required - core.TaskExecutionIdentifier task_execution_id = 1; - - // Indicates the number of resources to be returned. - // +required - uint32 limit = 2; - - // In the case of multiple pages of results, the, server-provided token can be used to fetch the next page - // in a query. - // +optional - string token = 3; - - // Indicates a list of filters passed as string. - // More info on constructing filters : - // +optional - string filters = 4; - - // Sort ordering. - // +optional - Sort sort_by = 5; -} - -// Encapsulates all details for a single node execution entity. -// A node represents a component in the overall workflow graph. A node launch a task, multiple tasks, an entire nested -// sub-workflow, or even a separate child-workflow execution. -// The same task can be called repeatedly in a single workflow but each node is unique. -message NodeExecution { - - // Uniquely identifies an individual node execution. - core.NodeExecutionIdentifier id = 1; - - // Path to remote data store where input blob is stored. - string input_uri = 2; - - // Computed results associated with this node execution. - NodeExecutionClosure closure = 3; - - // Metadata for Node Execution - NodeExecutionMetaData metadata = 4; -} - -// Represents additional attributes related to a Node Execution -message NodeExecutionMetaData { - // Node executions are grouped depending on retries of the parent - // Retry group is unique within the context of a parent node. 
- string retry_group = 1; - - // Boolean flag indicating if the node has child nodes under it - // This can be true when a node contains a dynamic workflow which then produces - // child nodes. - bool is_parent_node = 2; - - // Node id of the node in the original workflow - // This maps to value of WorkflowTemplate.nodes[X].id - string spec_node_id = 3; - - // Boolean flag indicating if the node has contains a dynamic workflow which then produces child nodes. - // This is to distinguish between subworkflows and dynamic workflows which can both have is_parent_node as true. - bool is_dynamic = 4; - - // Boolean flag indicating if the node is an array node. This is intended to uniquely identify - // array nodes from other nodes which can have is_parent_node as true. - bool is_array = 5; -} - -// Request structure to retrieve a list of node execution entities. -// See :ref:`ref_flyteidl.admin.NodeExecution` for more details -message NodeExecutionList { - repeated NodeExecution node_executions = 1; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. If there are no more results, this value will be empty. - string token = 2; -} - -// Container for node execution details and results. -message NodeExecutionClosure { - // Only a node in a terminal state will have a non-empty output_result. - oneof output_result { - // Links to a remotely stored, serialized core.LiteralMap of node execution outputs. - // DEPRECATED. Use GetNodeExecutionData to fetch output data instead. - string output_uri = 1 [deprecated = true]; - - // Error information for the Node - core.ExecutionError error = 2; - - // Raw output data produced by this node execution. - // DEPRECATED. Use GetNodeExecutionData to fetch output data instead. - core.LiteralMap output_data = 10 [deprecated = true]; - } - - // The last recorded phase for this node execution. 
- core.NodeExecution.Phase phase = 3; - - // Time at which the node execution began running. - google.protobuf.Timestamp started_at = 4; - - // The amount of time the node execution spent running. - google.protobuf.Duration duration = 5; - - // Time at which the node execution was created. - google.protobuf.Timestamp created_at = 6; - - // Time at which the node execution was last updated. - google.protobuf.Timestamp updated_at = 7; - - // Store metadata for what the node launched. - // for ex: if this is a workflow node, we store information for the launched workflow. - oneof target_metadata { - WorkflowNodeMetadata workflow_node_metadata = 8; - TaskNodeMetadata task_node_metadata = 9; - } - - // String location uniquely identifying where the deck HTML file is. - // NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar) - string deck_uri = 11; - - // dynamic_job_spec_uri is the location of the DynamicJobSpec proto message for a DynamicWorkflow. This is required - // to correctly recover partially completed executions where the subworkflow has already been compiled. - string dynamic_job_spec_uri = 12; -} - -// Metadata for a WorkflowNode -message WorkflowNodeMetadata { - // The identifier for a workflow execution launched by a node. - core.WorkflowExecutionIdentifier executionId = 1; -} - -// Metadata for the case in which the node is a TaskNode -message TaskNodeMetadata { - // Captures the status of caching for this execution. - core.CatalogCacheStatus cache_status = 1; - // This structure carries the catalog artifact information - core.CatalogMetadata catalog_key = 2; - // The latest checkpoint location - string checkpoint_uri = 4; -} - -// For dynamic workflow nodes we capture information about the dynamic workflow definition that gets generated. -message DynamicWorkflowNodeMetadata { - // id represents the unique identifier of the workflow. 
- core.Identifier id = 1; - - // Represents the compiled representation of the embedded dynamic workflow. - core.CompiledWorkflowClosure compiled_workflow = 2; - - // dynamic_job_spec_uri is the location of the DynamicJobSpec proto message for this DynamicWorkflow. This is - // required to correctly recover partially completed executions where the subworkflow has already been compiled. - string dynamic_job_spec_uri = 3; -} - -// Request structure to fetch inputs and output for a node execution. -// By default, these are not returned in :ref:`ref_flyteidl.admin.NodeExecutionGetRequest` -message NodeExecutionGetDataRequest { - // The identifier of the node execution for which to fetch inputs and outputs. - core.NodeExecutionIdentifier id = 1; -} - -// Response structure for NodeExecutionGetDataRequest which contains inputs and outputs for a node execution. -message NodeExecutionGetDataResponse { - // Signed url to fetch a core.LiteralMap of node execution inputs. - // Deprecated: Please use full_inputs instead. - UrlBlob inputs = 1 [deprecated = true]; - - // Signed url to fetch a core.LiteralMap of node execution outputs. - // Deprecated: Please use full_outputs instead. - UrlBlob outputs = 2 [deprecated = true]; - - // Full_inputs will only be populated if they are under a configured size threshold. - core.LiteralMap full_inputs = 3; - - // Full_outputs will only be populated if they are under a configured size threshold. - core.LiteralMap full_outputs = 4; - - // Optional Workflow closure for a dynamically generated workflow, in the case this node yields a dynamic workflow we return its structure here. 
- DynamicWorkflowNodeMetadata dynamic_workflow = 16; - - FlyteURLs flyte_urls = 17; - -} - -message GetDynamicNodeWorkflowRequest { - core.NodeExecutionIdentifier id = 1; -} - -message DynamicNodeWorkflowResponse { - core.CompiledWorkflowClosure compiled_workflow = 1; -} diff --git a/docs/api/flyteidl/flyteidl/admin/notification.proto b/docs/api/flyteidl/flyteidl/admin/notification.proto deleted file mode 100644 index 9ef54c9794..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/notification.proto +++ /dev/null @@ -1,27 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -// Represents the Email object that is sent to a publisher/subscriber -// to forward the notification. -// Note: This is internal to Admin and doesn't need to be exposed to other components. -message EmailMessage { - // The list of email addresses to receive an email with the content populated in the other fields. - // Currently, each email recipient will receive its own email. - // This populates the TO field. - repeated string recipients_email = 1; - - // The email of the sender. - // This populates the FROM field. - string sender_email = 2; - - // The content of the subject line. - // This populates the SUBJECT field. - string subject_line = 3; - - // The content of the email body. - // This populates the BODY field. 
- string body = 4; -} diff --git a/docs/api/flyteidl/flyteidl/admin/project.proto b/docs/api/flyteidl/flyteidl/admin/project.proto deleted file mode 100644 index 8b994b7267..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/project.proto +++ /dev/null @@ -1,132 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - - -import "flyteidl/admin/common.proto"; - -// Empty request for GetDomain -message GetDomainRequest {} - -// Namespace within a project commonly used to differentiate between different service instances. -// e.g. "production", "development", etc. -message Domain { - // Globally unique domain name. - string id = 1; - - // Display name. - string name = 2; -} - -// Represents a list of domains. -message GetDomainsResponse { - repeated Domain domains = 1; -} - -// Top-level namespace used to classify different entities like workflows and executions. -message Project { - // Globally unique project name. - string id = 1; - - // Display name. - string name = 2; - - repeated Domain domains = 3; - - string description = 4; - - // Leverage Labels from flyteidl.admin.common.proto to - // tag projects with ownership information. - Labels labels = 5; - - // The state of the project is used to control its visibility in the UI and validity. - enum ProjectState { - // By default, all projects are considered active. - ACTIVE = 0; - - // Archived projects are no longer visible in the UI and no longer valid. - ARCHIVED = 1; - - // System generated projects that aren't explicitly created or managed by a user. - SYSTEM_GENERATED = 2; - - // System archived projects that aren't explicitly archived by a user. - SYSTEM_ARCHIVED = 3; - } - ProjectState state = 6; - - // Optional, org key applied to the resource. - string org = 7; -} - -// Represents a list of projects. 
-// See :ref:`ref_flyteidl.admin.Project` for more details -message Projects { - repeated Project projects = 1; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. If there are no more results, this value will be empty. - string token = 2; -} - -// Request to retrieve a list of projects matching specified filters. -// See :ref:`ref_flyteidl.admin.Project` for more details -message ProjectListRequest { - // Indicates the number of projects to be returned. - // +required - uint32 limit = 1; - - // In the case of multiple pages of results, this server-provided token can be used to fetch the next page - // in a query. - // +optional - string token = 2; - - // Indicates a list of filters passed as string. - // More info on constructing filters : - // +optional - string filters = 3; - - // Sort ordering. - // +optional - Sort sort_by = 4; - - // Optional, org filter applied to list project requests. - string org = 5; -} - -// Adds a new user-project within the Flyte deployment. -// See :ref:`ref_flyteidl.admin.Project` for more details -message ProjectRegisterRequest { - // +required - Project project = 1; -} - -// Purposefully empty, may be updated in the future. -message ProjectRegisterResponse { -} - -// Purposefully empty, may be updated in the future. -message ProjectUpdateResponse { -} - -message ProjectGetRequest { - // Indicates a unique project. - // +required - string id = 1; - - // Optional, org key applied to the resource. - string org = 2; -} - - -// Error returned for inactive projects -message InactiveProject { - // Indicates a unique project. - // +required - string id = 1; - - // Optional, org key applied to the resource. 
- string org = 2; -} - diff --git a/docs/api/flyteidl/flyteidl/admin/project_attributes.proto b/docs/api/flyteidl/flyteidl/admin/project_attributes.proto deleted file mode 100644 index 2656ab25f5..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/project_attributes.proto +++ /dev/null @@ -1,69 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/admin/matchable_resource.proto"; - -// Defines a set of custom matching attributes at the project level. -// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message ProjectAttributes { - // Unique project id for which this set of attributes will be applied. - string project = 1; - - MatchingAttributes matching_attributes = 2; - - // Optional, org key applied to the project. - string org = 3; -} - -// Sets custom attributes for a project -// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message ProjectAttributesUpdateRequest { - // +required - ProjectAttributes attributes = 1; -} - -// Purposefully empty, may be populated in the future. -message ProjectAttributesUpdateResponse { -} - -// Request to get an individual project level attribute override. -// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message ProjectAttributesGetRequest { - // Unique project id which this set of attributes references. - // +required - string project = 1; - - // Which type of matchable attributes to return. - // +required - MatchableResource resource_type = 2; - - // Optional, org key applied to the project. - string org = 3; -} - -// Response to get an individual project level attribute override. 
-// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message ProjectAttributesGetResponse { - ProjectAttributes attributes = 1; -} - -// Request to delete a set matchable project level attribute override. -// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message ProjectAttributesDeleteRequest { - // Unique project id which this set of attributes references. - // +required - string project = 1; - - // Which type of matchable attributes to delete. - // +required - MatchableResource resource_type = 2; - - // Optional, org key applied to the project. - string org = 3; -} - -// Purposefully empty, may be populated in the future. -message ProjectAttributesDeleteResponse { -} diff --git a/docs/api/flyteidl/flyteidl/admin/project_domain_attributes.proto b/docs/api/flyteidl/flyteidl/admin/project_domain_attributes.proto deleted file mode 100644 index b493ae1178..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/project_domain_attributes.proto +++ /dev/null @@ -1,80 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/admin/matchable_resource.proto"; - -// Defines a set of custom matching attributes which defines resource defaults for a project and domain. -// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message ProjectDomainAttributes { - // Unique project id for which this set of attributes will be applied. - string project = 1; - - // Unique domain id for which this set of attributes will be applied. - string domain = 2; - - MatchingAttributes matching_attributes = 3; - - // Optional, org key applied to the attributes. - string org = 4; -} - -// Sets custom attributes for a project-domain combination. 
-// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message ProjectDomainAttributesUpdateRequest { - // +required - ProjectDomainAttributes attributes = 1; -} - -// Purposefully empty, may be populated in the future. -message ProjectDomainAttributesUpdateResponse { -} - -// Request to get an individual project domain attribute override. -// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message ProjectDomainAttributesGetRequest { - // Unique project id which this set of attributes references. - // +required - string project = 1; - - // Unique domain id which this set of attributes references. - // +required - string domain = 2; - - // Which type of matchable attributes to return. - // +required - MatchableResource resource_type = 3; - - // Optional, org key applied to the attributes. - string org = 4; -} - -// Response to get an individual project domain attribute override. -// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message ProjectDomainAttributesGetResponse { - ProjectDomainAttributes attributes = 1; -} - -// Request to delete a set matchable project domain attribute override. -// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message ProjectDomainAttributesDeleteRequest { - // Unique project id which this set of attributes references. - // +required - string project = 1; - - // Unique domain id which this set of attributes references. - // +required - string domain = 2; - - // Which type of matchable attributes to delete. - // +required - MatchableResource resource_type = 3; - - // Optional, org key applied to the attributes. - string org = 4; -} - -// Purposefully empty, may be populated in the future. 
-message ProjectDomainAttributesDeleteResponse { -} diff --git a/docs/api/flyteidl/flyteidl/admin/schedule.proto b/docs/api/flyteidl/flyteidl/admin/schedule.proto deleted file mode 100644 index 6bcbd90140..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/schedule.proto +++ /dev/null @@ -1,43 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -// Represents a frequency at which to run a schedule. -enum FixedRateUnit { - MINUTE = 0; - HOUR = 1; - DAY = 2; -} - -// Option for schedules run at a certain frequency e.g. every 2 minutes. -message FixedRate { - uint32 value = 1; - FixedRateUnit unit = 2; -} - -// Options for schedules to run according to a cron expression. -message CronSchedule { - // Standard/default cron implementation as described by https://en.wikipedia.org/wiki/Cron#CRON_expression; - // Also supports nonstandard predefined scheduling definitions - // as described by https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html#CronExpressions - // except @reboot - string schedule = 1; - // ISO 8601 duration as described by https://en.wikipedia.org/wiki/ISO_8601#Durations - string offset = 2; -} - -// Defines complete set of information required to trigger an execution on a schedule. -message Schedule { - - oneof ScheduleExpression { - // Uses AWS syntax: Minutes Hours Day-of-month Month Day-of-week Year - // e.g. for a schedule that runs every 15 minutes: 0/15 * * * ? * - string cron_expression = 1 [deprecated=true]; - FixedRate rate = 2; - CronSchedule cron_schedule = 4; - } - - // Name of the input variable that the kickoff time will be supplied to when the workflow is kicked off. 
- string kickoff_time_input_arg = 3; -} diff --git a/docs/api/flyteidl/flyteidl/admin/signal.proto b/docs/api/flyteidl/flyteidl/admin/signal.proto deleted file mode 100644 index 39ff5c09b9..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/signal.proto +++ /dev/null @@ -1,86 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/admin/common.proto"; -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/literals.proto"; -import "flyteidl/core/types.proto"; - -// SignalGetOrCreateRequest represents a request structure to retrieve or create a signal. -// See :ref:`ref_flyteidl.admin.Signal` for more details -message SignalGetOrCreateRequest { - // A unique identifier for the requested signal. - core.SignalIdentifier id = 1; - - // A type denoting the required value type for this signal. - core.LiteralType type = 2; -} - -// SignalListRequest represents a request structure to retrieve a collection of signals. -// See :ref:`ref_flyteidl.admin.Signal` for more details -message SignalListRequest { - // Indicates the workflow execution to filter by. - // +required - core.WorkflowExecutionIdentifier workflow_execution_id = 1; - - // Indicates the number of resources to be returned. - // +required - uint32 limit = 2; - - // In the case of multiple pages of results, the, server-provided token can be used to fetch the next page - // in a query. - // +optional - string token = 3; - - // Indicates a list of filters passed as string. - // +optional - string filters = 4; - - // Sort ordering. - // +optional - Sort sort_by = 5; -} - -// SignalList represents collection of signals along with the token of the last result. -// See :ref:`ref_flyteidl.admin.Signal` for more details -message SignalList { - // A list of signals matching the input filters. 
- repeated Signal signals = 1; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. If there are no more results, this value will be empty. - string token = 2; -} - -// SignalSetRequest represents a request structure to set the value on a signal. Setting a signal -// effetively satisfies the signal condition within a Flyte workflow. -// See :ref:`ref_flyteidl.admin.Signal` for more details -message SignalSetRequest { - // A unique identifier for the requested signal. - core.SignalIdentifier id = 1; - - // The value of this signal, must match the defining signal type. - core.Literal value = 2; -} - -// SignalSetResponse represents a response structure if signal setting succeeds. -message SignalSetResponse { - // Purposefully empty, may be populated in the future. -} - -// Signal encapsulates a unique identifier, associated metadata, and a value for a single Flyte -// signal. Signals may exist either without a set value (representing a signal request) or with a -// populated value (indicating the signal has been given). -message Signal { - // A unique identifier for the requested signal. - core.SignalIdentifier id = 1; - - // A type denoting the required value type for this signal. - core.LiteralType type = 2; - - // The value of the signal. This is only available if the signal has been "set" and must match - // the defined the type. 
- core.Literal value = 3; -} diff --git a/docs/api/flyteidl/flyteidl/admin/task.proto b/docs/api/flyteidl/flyteidl/admin/task.proto deleted file mode 100644 index 78fbba39f8..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/task.proto +++ /dev/null @@ -1,71 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/tasks.proto"; -import "flyteidl/core/compiler.proto"; -import "flyteidl/admin/description_entity.proto"; -import "google/protobuf/timestamp.proto"; - -// Represents a request structure to create a revision of a task. -// See :ref:`ref_flyteidl.admin.Task` for more details -message TaskCreateRequest { - // id represents the unique identifier of the task. - // +required - core.Identifier id = 1; - - // Represents the specification for task. - // +required - TaskSpec spec = 2; -} - -// Represents a response structure if task creation succeeds. -message TaskCreateResponse { - // Purposefully empty, may be populated in the future. -} - -// Flyte workflows are composed of many ordered tasks. That is small, reusable, self-contained logical blocks -// arranged to process workflow inputs and produce a deterministic set of outputs. -// Tasks can come in many varieties tuned for specialized behavior. -message Task { - // id represents the unique identifier of the task. - core.Identifier id = 1; - - // closure encapsulates all the fields that maps to a compiled version of the task. - TaskClosure closure = 2; - - // One-liner overview of the entity. - string short_description = 3; -} - -// Represents a list of tasks returned from the admin. -// See :ref:`ref_flyteidl.admin.Task` for more details -message TaskList { - // A list of tasks returned based on the request. - repeated Task tasks = 1; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. 
If there are no more results, this value will be empty. - string token = 2; -} - -// Represents a structure that encapsulates the user-configured specification of the task. -message TaskSpec { - // Template of the task that encapsulates all the metadata of the task. - core.TaskTemplate template = 1; - - // Represents the specification for description entity. - DescriptionEntity description = 2; -} - -// Compute task attributes which include values derived from the TaskSpec, as well as plugin-specific data -// and task metadata. -message TaskClosure { - // Represents the compiled representation of the task from the specification provided. - core.CompiledTask compiled_task = 1; - - // Time at which the task was created. - google.protobuf.Timestamp created_at = 2; -} diff --git a/docs/api/flyteidl/flyteidl/admin/task_execution.proto b/docs/api/flyteidl/flyteidl/admin/task_execution.proto deleted file mode 100644 index 54d2ff1e61..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/task_execution.proto +++ /dev/null @@ -1,168 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/admin/common.proto"; -import "flyteidl/core/execution.proto"; -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/literals.proto"; -import "flyteidl/event/event.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/struct.proto"; - -// A message used to fetch a single task execution entity. -// See :ref:`ref_flyteidl.admin.TaskExecution` for more details -message TaskExecutionGetRequest { - // Unique identifier for the task execution. - // +required - core.TaskExecutionIdentifier id = 1; -} - -// Represents a request structure to retrieve a list of task execution entities yielded by a specific node execution. 
-// See :ref:`ref_flyteidl.admin.TaskExecution` for more details -message TaskExecutionListRequest { - // Indicates the node execution to filter by. - // +required - core.NodeExecutionIdentifier node_execution_id = 1; - - // Indicates the number of resources to be returned. - // +required - uint32 limit = 2; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. - // +optional - string token = 3; - - // Indicates a list of filters passed as string. - // More info on constructing filters : - // +optional - string filters = 4; - - // Sort ordering for returned list. - // +optional - Sort sort_by = 5; -} - -// Encapsulates all details for a single task execution entity. -// A task execution represents an instantiated task, including all inputs and additional -// metadata as well as computed results included state, outputs, and duration-based attributes. -message TaskExecution { - // Unique identifier for the task execution. - core.TaskExecutionIdentifier id = 1; - - // Path to remote data store where input blob is stored. - string input_uri = 2; - - // Task execution details and results. - TaskExecutionClosure closure = 3; - - // Whether this task spawned nodes. - bool is_parent = 4; -} - -// Response structure for a query to list of task execution entities. -// See :ref:`ref_flyteidl.admin.TaskExecution` for more details -message TaskExecutionList { - repeated TaskExecution task_executions = 1; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. If there are no more results, this value will be empty. - string token = 2; -} - -// Container for task execution details and results. -message TaskExecutionClosure { - oneof output_result { - // Path to remote data store where output blob is stored if the execution succeeded (and produced outputs). - // DEPRECATED. Use GetTaskExecutionData to fetch output data instead. 
- string output_uri = 1 [deprecated = true]; - - // Error information for the task execution. Populated if the execution failed. - core.ExecutionError error = 2; - - // Raw output data produced by this task execution. - // DEPRECATED. Use GetTaskExecutionData to fetch output data instead. - core.LiteralMap output_data = 12 [deprecated = true]; - } - - // The last recorded phase for this task execution. - core.TaskExecution.Phase phase = 3; - - // Detailed log information output by the task execution. - repeated core.TaskLog logs = 4; - - // Time at which the task execution began running. - google.protobuf.Timestamp started_at = 5; - - // The amount of time the task execution spent running. - google.protobuf.Duration duration = 6; - - // Time at which the task execution was created. - google.protobuf.Timestamp created_at = 7; - - // Time at which the task execution was last updated. - google.protobuf.Timestamp updated_at = 8; - - // Custom data specific to the task plugin. - google.protobuf.Struct custom_info = 9; - - // If there is an explanation for the most recent phase transition, the reason will capture it. - string reason = 10; - - // A predefined yet extensible Task type identifier. - string task_type = 11; - - // Metadata around how a task was executed. - event.TaskExecutionMetadata metadata = 16; - - // The event version is used to indicate versioned changes in how data is maintained using this - // proto message. For example, event_verison > 0 means that maps tasks logs use the - // TaskExecutionMetadata ExternalResourceInfo fields for each subtask rather than the TaskLog - // in this message. - int32 event_version = 17; - - // A time-series of the phase transition or update explanations. This, when compared to storing a singular reason - // as previously done, is much more valuable in visualizing and understanding historical evaluations. 
- repeated Reason reasons = 18; -} - -// Reason is a single message annotated with a timestamp to indicate the instant the reason occurred. -message Reason { - // occurred_at is the timestamp indicating the instant that this reason happened. - google.protobuf.Timestamp occurred_at = 1; - - // message is the explanation for the most recent phase transition or status update. - string message = 2; -} - -// Request structure to fetch inputs and output for a task execution. -// By default this data is not returned inline in :ref:`ref_flyteidl.admin.TaskExecutionGetRequest` -message TaskExecutionGetDataRequest { - // The identifier of the task execution for which to fetch inputs and outputs. - // +required - core.TaskExecutionIdentifier id = 1; -} - -// Response structure for TaskExecutionGetDataRequest which contains inputs and outputs for a task execution. -message TaskExecutionGetDataResponse { - // Signed url to fetch a core.LiteralMap of task execution inputs. - // Deprecated: Please use full_inputs instead. - UrlBlob inputs = 1 [deprecated = true]; - - // Signed url to fetch a core.LiteralMap of task execution outputs. - // Deprecated: Please use full_outputs instead. - UrlBlob outputs = 2 [deprecated = true]; - - // Full_inputs will only be populated if they are under a configured size threshold. - core.LiteralMap full_inputs = 3; - - // Full_outputs will only be populated if they are under a configured size threshold. 
- core.LiteralMap full_outputs = 4; - - // flyte tiny url to fetch a core.LiteralMap of task execution's IO - // Deck will be empty for task - FlyteURLs flyte_urls = 5; -} diff --git a/docs/api/flyteidl/flyteidl/admin/version.proto b/docs/api/flyteidl/flyteidl/admin/version.proto deleted file mode 100644 index e0e38bda1f..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/version.proto +++ /dev/null @@ -1,27 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -// Response for the GetVersion API -message GetVersionResponse { - // The control plane version information. FlyteAdmin and related components - // form the control plane of Flyte - Version control_plane_version = 1; -} - -// Provides Version information for a component -message Version { - // Specifies the GIT sha of the build - string Build = 1; - - // Version for the build, should follow a semver - string Version = 2; - - // Build timestamp - string BuildTime = 3; -} - -// Empty request for GetVersion -message GetVersionRequest { -} diff --git a/docs/api/flyteidl/flyteidl/admin/workflow.proto b/docs/api/flyteidl/flyteidl/admin/workflow.proto deleted file mode 100644 index d522d65b73..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/workflow.proto +++ /dev/null @@ -1,92 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/core/compiler.proto"; -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/workflow.proto"; -import "flyteidl/admin/description_entity.proto"; -import "google/protobuf/timestamp.proto"; - -// Represents a request structure to create a revision of a workflow. -// See :ref:`ref_flyteidl.admin.Workflow` for more details -message WorkflowCreateRequest { - // id represents the unique identifier of the workflow. 
- // +required - core.Identifier id = 1; - - // Represents the specification for workflow. - // +required - WorkflowSpec spec = 2; -} - -message WorkflowCreateResponse { - // Purposefully empty, may be populated in the future. -} - -// Represents the workflow structure stored in the Admin -// A workflow is created by ordering tasks and associating outputs to inputs -// in order to produce a directed-acyclic execution graph. -message Workflow { - // id represents the unique identifier of the workflow. - core.Identifier id = 1; - - // closure encapsulates all the fields that maps to a compiled version of the workflow. - WorkflowClosure closure = 2; - - // One-liner overview of the entity. - string short_description = 3; -} - -// Represents a list of workflows returned from the admin. -// See :ref:`ref_flyteidl.admin.Workflow` for more details -message WorkflowList { - // A list of workflows returned based on the request. - repeated Workflow workflows = 1; - - // In the case of multiple pages of results, the server-provided token can be used to fetch the next page - // in a query. If there are no more results, this value will be empty. - string token = 2; -} - -// Represents a structure that encapsulates the specification of the workflow. -message WorkflowSpec { - // Template of the task that encapsulates all the metadata of the workflow. - core.WorkflowTemplate template = 1; - - // Workflows that are embedded into other workflows need to be passed alongside the parent workflow to the - // propeller compiler (since the compiler doesn't have any knowledge of other workflows - ie, it doesn't reach out - // to Admin to see other registered workflows). In fact, subworkflows do not even need to be registered. - repeated core.WorkflowTemplate sub_workflows = 2; - - // Represents the specification for description entity. - DescriptionEntity description = 3; -} - -// A container holding the compiled workflow produced from the WorkflowSpec and additional metadata. 
-message WorkflowClosure { - // Represents the compiled representation of the workflow from the specification provided. - core.CompiledWorkflowClosure compiled_workflow = 1; - - // Time at which the workflow was created. - google.protobuf.Timestamp created_at = 2; -} - -// The workflow id is already used and the structure is different -message WorkflowErrorExistsDifferentStructure { - core.Identifier id = 1; -} - -// The workflow id is already used with an identical sctructure -message WorkflowErrorExistsIdenticalStructure { - core.Identifier id = 1; -} - -// When a CreateWorkflowRequest fails due to matching id -message CreateWorkflowFailureReason { - oneof reason { - WorkflowErrorExistsDifferentStructure exists_different_structure = 1; - WorkflowErrorExistsIdenticalStructure exists_identical_structure = 2; - } -} diff --git a/docs/api/flyteidl/flyteidl/admin/workflow_attributes.proto b/docs/api/flyteidl/flyteidl/admin/workflow_attributes.proto deleted file mode 100644 index 9767f00df7..0000000000 --- a/docs/api/flyteidl/flyteidl/admin/workflow_attributes.proto +++ /dev/null @@ -1,89 +0,0 @@ -syntax = "proto3"; - -package flyteidl.admin; -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin"; - -import "flyteidl/admin/matchable_resource.proto"; - -// Defines a set of custom matching attributes which defines resource defaults for a project, domain and workflow. -// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message WorkflowAttributes { - // Unique project id for which this set of attributes will be applied. - string project = 1; - - // Unique domain id for which this set of attributes will be applied. - string domain = 2; - - // Workflow name for which this set of attributes will be applied. - string workflow = 3; - - MatchingAttributes matching_attributes = 4; - - // Optional, org key applied to the attributes. 
- string org = 5; -} - -// Sets custom attributes for a project, domain and workflow combination. -// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message WorkflowAttributesUpdateRequest { - WorkflowAttributes attributes = 1; -} - -// Purposefully empty, may be populated in the future. -message WorkflowAttributesUpdateResponse { -} - -// Request to get an individual workflow attribute override. -// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message WorkflowAttributesGetRequest { - // Unique project id which this set of attributes references. - // +required - string project = 1; - - // Unique domain id which this set of attributes references. - // +required - string domain = 2; - - // Workflow name which this set of attributes references. - // +required - string workflow = 3; - - // Which type of matchable attributes to return. - // +required - MatchableResource resource_type = 4; - - // Optional, org key applied to the attributes. - string org = 5; -} - -// Response to get an individual workflow attribute override. -message WorkflowAttributesGetResponse { - WorkflowAttributes attributes = 1; -} - -// Request to delete a set matchable workflow attribute override. -// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` -message WorkflowAttributesDeleteRequest { - // Unique project id which this set of attributes references. - // +required - string project = 1; - - // Unique domain id which this set of attributes references. - // +required - string domain = 2; - - // Workflow name which this set of attributes references. - // +required - string workflow = 3; - - // Which type of matchable attributes to delete. - // +required - MatchableResource resource_type = 4; - - // Optional, org key applied to the attributes. - string org = 5; -} - -// Purposefully empty, may be populated in the future. 
-message WorkflowAttributesDeleteResponse { -} diff --git a/docs/api/flyteidl/flyteidl/cacheservice/cacheservice.proto b/docs/api/flyteidl/flyteidl/cacheservice/cacheservice.proto deleted file mode 100644 index c85e2eb55c..0000000000 --- a/docs/api/flyteidl/flyteidl/cacheservice/cacheservice.proto +++ /dev/null @@ -1,143 +0,0 @@ -syntax = "proto3"; - -package flyteidl.cacheservice; - -import "flyteidl/core/literals.proto"; -import "flyteidl/core/types.proto"; -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/interface.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/cacheservice"; - -/* - * CacheService defines operations for cache management including retrieval, storage, and deletion of cached task/workflow outputs. - */ -service CacheService { - // Retrieves cached data by key. - rpc Get (GetCacheRequest) returns (GetCacheResponse); - - // Stores or updates cached data by key. - rpc Put (PutCacheRequest) returns (PutCacheResponse); - - // Deletes cached data by key. - rpc Delete (DeleteCacheRequest) returns (DeleteCacheResponse); - - // Get or extend a reservation for a cache key - rpc GetOrExtendReservation (GetOrExtendReservationRequest) returns (GetOrExtendReservationResponse); - - // Release the reservation for a cache key - rpc ReleaseReservation (ReleaseReservationRequest) returns (ReleaseReservationResponse); -} - -/* - * Additional metadata as key-value pairs - */ -message KeyMapMetadata { - map values = 1; // Additional metadata as key-value pairs -} - -/* - * Metadata for cached outputs, including the source identifier and timestamps. 
- */ -message Metadata { - core.Identifier source_identifier = 1; // Source task or workflow identifier - KeyMapMetadata key_map = 2; // Additional metadata as key-value pairs - google.protobuf.Timestamp created_at = 3; // Creation timestamp - google.protobuf.Timestamp last_updated_at = 4; // Last update timestamp -} - -/* - * Represents cached output, either as literals or an URI, with associated metadata. - */ -message CachedOutput { - oneof output { - flyteidl.core.LiteralMap output_literals = 1; // Output literals - string output_uri = 2; // URI to output data - } - Metadata metadata = 3; // Associated metadata -} - -/* - * Request to retrieve cached data by key. - */ -message GetCacheRequest { - string key = 1; // Cache key -} - -/* - * Response with cached data for a given key. - */ -message GetCacheResponse { - CachedOutput output = 1; // Cached output -} - -/* - * Request to store/update cached data by key. - */ -message PutCacheRequest { - string key = 1; // Cache key - CachedOutput output = 2; // Output to cache - bool overwrite = 3; // Overwrite flag -} - -/* - * Response message of cache store/update operation. - */ -message PutCacheResponse { - // Empty, success indicated by no errors -} - -/* - * Request to delete cached data by key. - */ -message DeleteCacheRequest { - string key = 1; // Cache key -} - -/* - * Response message of cache deletion operation. - */ -message DeleteCacheResponse { - // Empty, success indicated by no errors -} - -// A reservation including owner, heartbeat interval, expiration timestamp, and various metadata. 
-message Reservation { - string key = 1; // The unique ID for the reservation - same as the cache key - string owner_id = 2; // The unique ID of the owner for the reservation - google.protobuf.Duration heartbeat_interval = 3; // Requested reservation extension heartbeat interval - google.protobuf.Timestamp expires_at = 4; // Expiration timestamp of this reservation -} - -/* - * Request to get or extend a reservation for a cache key - */ -message GetOrExtendReservationRequest { - string key = 1; // The unique ID for the reservation - same as the cache key - string owner_id = 2; // The unique ID of the owner for the reservation - google.protobuf.Duration heartbeat_interval = 3; // Requested reservation extension heartbeat interval -} - -/* - * Request to get or extend a reservation for a cache key - */ -message GetOrExtendReservationResponse { - Reservation reservation = 1; // The reservation that was created or extended -} - -/* - * Request to release the reservation for a cache key - */ -message ReleaseReservationRequest { - string key = 1; // The unique ID for the reservation - same as the cache key - string owner_id = 2; // The unique ID of the owner for the reservation -} - -/* - * Response message of release reservation operation. - */ -message ReleaseReservationResponse { - // Empty, success indicated by no errors -} \ No newline at end of file diff --git a/docs/api/flyteidl/flyteidl/core/artifact_id.proto b/docs/api/flyteidl/flyteidl/core/artifact_id.proto deleted file mode 100644 index 022bc20cff..0000000000 --- a/docs/api/flyteidl/flyteidl/core/artifact_id.proto +++ /dev/null @@ -1,112 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -import "google/protobuf/timestamp.proto"; -import "flyteidl/core/identifier.proto"; - - -message ArtifactKey { - // Project and domain and suffix needs to be unique across a given artifact store. 
- string project = 1; - string domain = 2; - string name = 3; - string org = 4; -} - -// Only valid for triggers -message ArtifactBindingData { - reserved 1 to 4; - // These two fields are only relevant in the partition value case - oneof partition_data { - string partition_key = 5; - bool bind_to_time_partition = 6; - } - - // This is only relevant in the time partition case - TimeTransform time_transform = 7; -} - -enum Granularity { - UNSET = 0; - MINUTE = 1; - HOUR = 2; - DAY = 3; // default - MONTH = 4; -} - -enum Operator { - MINUS = 0; - PLUS = 1; -} - -message TimeTransform { - string transform = 1; - Operator op = 2; -} - -message InputBindingData { - string var = 1; -} - -message RuntimeBinding {} - -message LabelValue { - oneof value { - // The string static value is for use in the Partitions object - string static_value = 1; - - // The time value is for use in the TimePartition case - google.protobuf.Timestamp time_value = 2; - ArtifactBindingData triggered_binding = 3; - InputBindingData input_binding = 4; - RuntimeBinding runtime_binding = 5; - } -} - -message Partitions { - map value = 1; -} - -message TimePartition { - LabelValue value = 1; - Granularity granularity = 2; -} - -message ArtifactID { - ArtifactKey artifact_key = 1; - - string version = 2; - - // Think of a partition as a tag on an Artifact, except it's a key-value pair. - // Different partitions naturally have different versions (execution ids). - Partitions partitions = 3; - - // There is no such thing as an empty time partition - if it's not set, then there is no time partition. 
- TimePartition time_partition = 4; -} - -message ArtifactTag { - ArtifactKey artifact_key = 1; - - LabelValue value = 2; -} - -// Uniqueness constraints for Artifacts -// - project, domain, name, version, partitions -// Option 2 (tags are standalone, point to an individual artifact id): -// - project, domain, name, alias (points to one partition if partitioned) -// - project, domain, name, partition key, partition value -message ArtifactQuery { - oneof identifier { - ArtifactID artifact_id = 1; - ArtifactTag artifact_tag = 2; - string uri = 3; - - // This is used in the trigger case, where a user specifies a value for an input that is one of the triggering - // artifacts, or a partition value derived from a triggering artifact. - ArtifactBindingData binding = 4; - } -} diff --git a/docs/api/flyteidl/flyteidl/core/catalog.proto b/docs/api/flyteidl/flyteidl/core/catalog.proto deleted file mode 100644 index 4d98c28d7e..0000000000 --- a/docs/api/flyteidl/flyteidl/core/catalog.proto +++ /dev/null @@ -1,63 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -import "flyteidl/core/identifier.proto"; - -// Indicates the status of CatalogCaching. 
The reason why this is not embedded in TaskNodeMetadata is, that we may use for other types of nodes as well in the future -enum CatalogCacheStatus { - // Used to indicate that caching was disabled - CACHE_DISABLED = 0; - // Used to indicate that the cache lookup resulted in no matches - CACHE_MISS = 1; - // used to indicate that the associated artifact was a result of a previous execution - CACHE_HIT = 2; - // used to indicate that the resultant artifact was added to the cache - CACHE_POPULATED = 3; - // Used to indicate that cache lookup failed because of an error - CACHE_LOOKUP_FAILURE = 4; - // Used to indicate that cache lookup failed because of an error - CACHE_PUT_FAILURE = 5; - // Used to indicate the cache lookup was skipped - CACHE_SKIPPED = 6; - // Used to indicate that the cache was evicted - CACHE_EVICTED = 7; -}; - -message CatalogArtifactTag { - // Artifact ID is generated name - string artifact_id = 1; - // Flyte computes the tag automatically, as the hash of the values - string name = 2; -}; - -// Catalog artifact information with specific metadata -message CatalogMetadata { - // Dataset ID in the catalog - Identifier dataset_id = 1; - // Artifact tag in the catalog - CatalogArtifactTag artifact_tag = 2; - // Optional: Source Execution identifier, if this dataset was generated by another execution in Flyte. This is a one-of field and will depend on the caching context - oneof source_execution { - // Today we only support TaskExecutionIdentifier as a source, as catalog caching only works for task executions - TaskExecutionIdentifier source_task_execution = 3; - } -}; - -message CatalogReservation { - // Indicates the status of a catalog reservation operation. 
- enum Status { - // Used to indicate that reservations are disabled - RESERVATION_DISABLED = 0; - // Used to indicate that a reservation was successfully acquired or extended - RESERVATION_ACQUIRED = 1; - // Used to indicate that an active reservation currently exists - RESERVATION_EXISTS = 2; - // Used to indicate that the reservation has been successfully released - RESERVATION_RELEASED = 3; - // Used to indicate that a reservation operation resulted in failure - RESERVATION_FAILURE = 4; - } -} diff --git a/docs/api/flyteidl/flyteidl/core/compiler.proto b/docs/api/flyteidl/flyteidl/core/compiler.proto deleted file mode 100644 index 620ee26f2d..0000000000 --- a/docs/api/flyteidl/flyteidl/core/compiler.proto +++ /dev/null @@ -1,64 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/interface.proto"; -import "flyteidl/core/workflow.proto"; -import "flyteidl/core/tasks.proto"; - -// Adjacency list for the workflow. This is created as part of the compilation process. Every process after the compilation -// step uses this created ConnectionSet -message ConnectionSet { - message IdList { - repeated string ids = 1; - } - - // A list of all the node ids that are downstream from a given node id - map downstream = 7; - - // A list of all the node ids, that are upstream of this node id - map upstream = 8; -} - -// Output of the compilation Step. This object represents one workflow. We store more metadata at this layer -message CompiledWorkflow { - // Completely contained Workflow Template - WorkflowTemplate template = 1; - // For internal use only! This field is used by the system and must not be filled in. Any values set will be ignored. - ConnectionSet connections = 2; -} - -// Output of the compilation step. This object represents one LaunchPlan. 
We store more metadata at this layer -message CompiledLaunchPlan { - // Completely contained LaunchPlan Template - LaunchPlanTemplate template = 1; -} - -// Output of the Compilation step. This object represent one Task. We store more metadata at this layer -message CompiledTask { - // Completely contained TaskTemplate - TaskTemplate template = 1; -} - -// A Compiled Workflow Closure contains all the information required to start a new execution, or to visualize a workflow -// and its details. The CompiledWorkflowClosure should always contain a primary workflow, that is the main workflow that -// will being the execution. All subworkflows are denormalized. WorkflowNodes refer to the workflow identifiers of -// compiled subworkflows. -message CompiledWorkflowClosure { - //+required - CompiledWorkflow primary = 1; - // Guaranteed that there will only exist one and only one workflow with a given id, i.e., every sub workflow has a - // unique identifier. Also every enclosed subworkflow is used either by a primary workflow or by a subworkflow - // as an inlined workflow - //+optional - repeated CompiledWorkflow sub_workflows = 2; - // Guaranteed that there will only exist one and only one task with a given id, i.e., every task has a unique id - //+required (at least 1) - repeated CompiledTask tasks = 3; - // A collection of launch plans that are compiled. Guaranteed that there will only exist one and only one launch plan - // with a given id, i.e., every launch plan has a unique id. 
- repeated CompiledLaunchPlan launch_plans = 4; -} diff --git a/docs/api/flyteidl/flyteidl/core/condition.proto b/docs/api/flyteidl/flyteidl/core/condition.proto deleted file mode 100644 index 84c7fb0314..0000000000 --- a/docs/api/flyteidl/flyteidl/core/condition.proto +++ /dev/null @@ -1,63 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -import "flyteidl/core/literals.proto"; - -// Defines a 2-level tree where the root is a comparison operator and Operands are primitives or known variables. -// Each expression results in a boolean result. -message ComparisonExpression { - // Binary Operator for each expression - enum Operator { - EQ = 0; - NEQ = 1; - // Greater Than - GT = 2; - GTE = 3; - // Less Than - LT = 4; - LTE = 5; - } - - Operator operator = 1; - Operand left_value = 2; - Operand right_value = 3; -} - -// Defines an operand to a comparison expression. -message Operand { - oneof val { - // Can be a constant - core.Primitive primitive = 1 [deprecated = true]; - // Or one of this node's input variables - string var = 2; - // Replace the primitive field - core.Scalar scalar = 3; - } -} - -// Defines a boolean expression tree. It can be a simple or a conjunction expression. -// Multiple expressions can be combined using a conjunction or a disjunction to result in a final boolean result. -message BooleanExpression { - oneof expr { - ConjunctionExpression conjunction = 1; - ComparisonExpression comparison = 2; - } -} - -// Defines a conjunction expression of two boolean expressions. -message ConjunctionExpression { - // Nested conditions. 
They can be conjoined using AND / OR - // Order of evaluation is not important as the operators are Commutative - enum LogicalOperator { - // Conjunction - AND = 0; - OR = 1; - } - - LogicalOperator operator = 1; - BooleanExpression left_expression = 2; - BooleanExpression right_expression = 3; -} diff --git a/docs/api/flyteidl/flyteidl/core/dynamic_job.proto b/docs/api/flyteidl/flyteidl/core/dynamic_job.proto deleted file mode 100644 index 1665f5fa29..0000000000 --- a/docs/api/flyteidl/flyteidl/core/dynamic_job.proto +++ /dev/null @@ -1,32 +0,0 @@ -syntax = "proto3"; - -import "flyteidl/core/tasks.proto"; -import "flyteidl/core/workflow.proto"; -import "flyteidl/core/literals.proto"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -// Describes a set of tasks to execute and how the final outputs are produced. -message DynamicJobSpec { - // A collection of nodes to execute. - repeated Node nodes = 1; - - // An absolute number of successful completions of nodes required to mark this job as succeeded. As soon as this - // criteria is met, the dynamic job will be marked as successful and outputs will be computed. If this number - // becomes impossible to reach (e.g. number of currently running tasks + number of already succeeded tasks < - // min_successes) the task will be aborted immediately and marked as failed. The default value of this field, if not - // specified, is the count of nodes repeated field. - int64 min_successes = 2; - - // Describes how to bind the final output of the dynamic job from the outputs of executed nodes. The referenced ids - // in bindings should have the generated id for the subtask. - repeated Binding outputs = 3; - - // [Optional] A complete list of task specs referenced in nodes. - repeated TaskTemplate tasks = 4; - - // [Optional] A complete list of task specs referenced in nodes. 
- repeated WorkflowTemplate subworkflows = 5; -} diff --git a/docs/api/flyteidl/flyteidl/core/errors.proto b/docs/api/flyteidl/flyteidl/core/errors.proto deleted file mode 100644 index 4d25389349..0000000000 --- a/docs/api/flyteidl/flyteidl/core/errors.proto +++ /dev/null @@ -1,35 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -import "flyteidl/core/execution.proto"; - -// Error message to propagate detailed errors from container executions to the execution -// engine. -message ContainerError { - // A simplified code for errors, so that we can provide a glossary of all possible errors. - string code = 1; - // A detailed error message. - string message = 2; - - // Defines a generic error type that dictates the behavior of the retry strategy. - enum Kind { - NON_RECOVERABLE = 0; - RECOVERABLE = 1; - } - - // An abstract error kind for this error. Defaults to Non_Recoverable if not specified. - Kind kind = 3; - - // Defines the origin of the error (system, user, unknown). - ExecutionError.ErrorKind origin = 4; -} - -// Defines the errors.pb file format the container can produce to communicate -// failure reasons to the execution engine. -message ErrorDocument { - // The error raised during execution. 
- ContainerError error = 1; -} diff --git a/docs/api/flyteidl/flyteidl/core/execution.proto b/docs/api/flyteidl/flyteidl/core/execution.proto deleted file mode 100644 index 4d55198955..0000000000 --- a/docs/api/flyteidl/flyteidl/core/execution.proto +++ /dev/null @@ -1,118 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -import "google/protobuf/duration.proto"; - -// Indicates various phases of Workflow Execution -message WorkflowExecution { - enum Phase { - UNDEFINED = 0; - QUEUED = 1; - RUNNING = 2; - SUCCEEDING = 3; - SUCCEEDED = 4; - FAILING = 5; - FAILED = 6; - ABORTED = 7; - TIMED_OUT = 8; - ABORTING = 9; - } -} - -// Indicates various phases of Node Execution that only include the time spent to run the nodes/workflows -message NodeExecution { - enum Phase { - UNDEFINED = 0; - QUEUED = 1; - RUNNING = 2; - SUCCEEDED = 3; - FAILING = 4; - FAILED = 5; - ABORTED = 6; - SKIPPED = 7; - TIMED_OUT = 8; - DYNAMIC_RUNNING = 9; - RECOVERED = 10; - } -} - -// Phases that task plugins can go through. Not all phases may be applicable to a specific plugin task, -// but this is the cumulative list that customers may want to know about for their task. -message TaskExecution{ - enum Phase { - UNDEFINED = 0; - QUEUED = 1; - RUNNING = 2; - SUCCEEDED = 3; - ABORTED = 4; - FAILED = 5; - // To indicate cases where task is initializing, like: ErrImagePull, ContainerCreating, PodInitializing - INITIALIZING = 6; - // To address cases, where underlying resource is not available: Backoff error, Resource quota exceeded - WAITING_FOR_RESOURCES = 7; - } -} - - -// Represents the error message from the execution. -message ExecutionError { - // Error code indicates a grouping of a type of error. - // More Info: - string code = 1; - // Detailed description of the error - including stack trace. 
- string message = 2; - // Full error contents accessible via a URI - string error_uri = 3; - // Error type: System or User - enum ErrorKind { - UNKNOWN = 0; - USER = 1; - SYSTEM = 2; - } - ErrorKind kind = 4; -} - -// Log information for the task that is specific to a log sink -// When our log story is flushed out, we may have more metadata here like log link expiry -message TaskLog { - - enum MessageFormat { - UNKNOWN = 0; - CSV = 1; - JSON = 2; - } - - string uri = 1; - string name = 2; - MessageFormat message_format = 3; - google.protobuf.Duration ttl = 4; - bool ShowWhilePending = 5; - bool HideOnceFinished = 6; -} - -// Represents customized execution run-time attributes. -message QualityOfServiceSpec { - // Indicates how much queueing delay an execution can tolerate. - google.protobuf.Duration queueing_budget = 1; - - // Add future, user-configurable options here -} - -// Indicates the priority of an execution. -message QualityOfService { - enum Tier { - // Default: no quality of service specified. - UNDEFINED = 0; - HIGH = 1; - MEDIUM = 2; - LOW = 3; - } - - oneof designation { - Tier tier = 1; - QualityOfServiceSpec spec = 2; - } -} diff --git a/docs/api/flyteidl/flyteidl/core/execution_envs.proto b/docs/api/flyteidl/flyteidl/core/execution_envs.proto deleted file mode 100644 index d5da775f65..0000000000 --- a/docs/api/flyteidl/flyteidl/core/execution_envs.proto +++ /dev/null @@ -1,45 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -import "google/protobuf/struct.proto"; - -// ExecutionEnvAssignment is a message that is used to assign an execution environment to a set of -// nodes. -message ExecutionEnvAssignment { - // node_ids is a list of node ids that are being assigned the execution environment. - repeated string node_ids = 1; - - // task_type is the type of task that is being assigned. 
This is used to override which Flyte - // plugin will be used during execution. - string task_type = 2; - - // execution_env is the environment that is being assigned to the nodes. - ExecutionEnv execution_env = 3; -} - -// ExecutionEnv is a message that is used to specify the execution environment. -message ExecutionEnv { - // name is a human-readable identifier for the execution environment. This is combined with the - // project, domain, and version to uniquely identify an execution environment. - string name = 1; - - // type is the type of the execution environment. - string type = 2; - - // environment is a oneof field that can be used to specify the environment in different ways. - oneof environment { - // extant is a reference to an existing environment. - google.protobuf.Struct extant = 3; - - // spec is a specification of the environment. - google.protobuf.Struct spec = 4; - } - - // version is the version of the execution environment. This may be used differently by each - // individual environment type (ex. auto-generated or manually provided), but is intended to - // allow variance in environment specifications with the same ID. - string version = 5; -} diff --git a/docs/api/flyteidl/flyteidl/core/identifier.proto b/docs/api/flyteidl/flyteidl/core/identifier.proto deleted file mode 100644 index 50bf22429c..0000000000 --- a/docs/api/flyteidl/flyteidl/core/identifier.proto +++ /dev/null @@ -1,80 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -// Indicates a resource type within Flyte. -enum ResourceType { - UNSPECIFIED = 0; - TASK = 1; - WORKFLOW = 2; - LAUNCH_PLAN = 3; - // A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. - // Eventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects - // in a similar manner to other Flyte objects - DATASET = 4; -} - -// Encapsulation of fields that uniquely identifies a Flyte resource. -message Identifier { - // Identifies the specific type of resource that this identifier corresponds to. - core.ResourceType resource_type = 1; - - // Name of the project the resource belongs to. - string project = 2; - - // Name of the domain the resource belongs to. - // A domain can be considered as a subset within a specific project. - string domain = 3; - - // User provided value for the resource. - string name = 4; - - // Specific version of the resource. - string version = 5; - - // Optional, org key applied to the resource. - string org = 6; -} - -// Encapsulation of fields that uniquely identifies a Flyte workflow execution -message WorkflowExecutionIdentifier { - // Name of the project the resource belongs to. - string project = 1; - - // Name of the domain the resource belongs to. - // A domain can be considered as a subset within a specific project. - string domain = 2; - - // User or system provided value for the resource. - string name = 4; - - // Optional, org key applied to the resource. - string org = 5; -} - -// Encapsulation of fields that identify a Flyte node execution entity. -message NodeExecutionIdentifier { - string node_id = 1; - - WorkflowExecutionIdentifier execution_id = 2; -} - -// Encapsulation of fields that identify a Flyte task execution entity. -message TaskExecutionIdentifier { - core.Identifier task_id = 1; - - core.NodeExecutionIdentifier node_execution_id = 2; - - uint32 retry_attempt = 3; -} - -// Encapsulation of fields the uniquely identify a signal. -message SignalIdentifier { - // Unique identifier for a signal. - string signal_id = 1; - - // Identifies the Flyte workflow execution this signal belongs to. 
- WorkflowExecutionIdentifier execution_id = 2; -} diff --git a/docs/api/flyteidl/flyteidl/core/interface.proto b/docs/api/flyteidl/flyteidl/core/interface.proto deleted file mode 100644 index ec7673d9c4..0000000000 --- a/docs/api/flyteidl/flyteidl/core/interface.proto +++ /dev/null @@ -1,64 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -import "flyteidl/core/types.proto"; -import "flyteidl/core/literals.proto"; -import "flyteidl/core/artifact_id.proto"; - -// Defines a strongly typed variable. -message Variable { - // Variable literal type. - LiteralType type = 1; - - //+optional string describing input variable - string description = 2; - - //+optional This object allows the user to specify how Artifacts are created. - // name, tag, partitions can be specified. The other fields (version and project/domain) are ignored. - core.ArtifactID artifact_partial_id = 3; - - core.ArtifactTag artifact_tag = 4; -} - -// A map of Variables -message VariableMap { - // Defines a map of variable names to variables. - map variables = 1; -} - -// Defines strongly typed inputs and outputs. -message TypedInterface { - VariableMap inputs = 1; - VariableMap outputs = 2; -} - -// A parameter is used as input to a launch plan and has -// the special ability to have a default value or mark itself as required. -message Parameter { - //+required Variable. Defines the type of the variable backing this parameter. - Variable var = 1; - - //+optional - oneof behavior { - // Defines a default value that has to match the variable type defined. - Literal default = 2; - - //+optional, is this value required to be filled. - bool required = 3; - - // This is an execution time search basically that should result in exactly one Artifact with a Type that - // matches the type of the variable. - core.ArtifactQuery artifact_query = 4; - - core.ArtifactID artifact_id = 5; - } -} - -// A map of Parameters. 
-message ParameterMap { - // Defines a map of parameter names to parameters. - map parameters = 1; -} diff --git a/docs/api/flyteidl/flyteidl/core/literals.proto b/docs/api/flyteidl/flyteidl/core/literals.proto deleted file mode 100644 index 66e4821867..0000000000 --- a/docs/api/flyteidl/flyteidl/core/literals.proto +++ /dev/null @@ -1,200 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -import "google/protobuf/timestamp.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/struct.proto"; -import "flyteidl/core/types.proto"; - -// Primitive Types -message Primitive { - // Defines one of simple primitive types. These types will get translated into different programming languages as - // described in https://developers.google.com/protocol-buffers/docs/proto#scalar. - oneof value { - int64 integer = 1; - double float_value = 2; - string string_value = 3; - bool boolean = 4; - google.protobuf.Timestamp datetime = 5; - google.protobuf.Duration duration = 6; - } -} - -// Used to denote a nil/null/None assignment to a scalar value. The underlying LiteralType for Void is intentionally -// undefined since it can be assigned to a scalar of any LiteralType. -message Void { -} - -// Refers to an offloaded set of files. It encapsulates the type of the store and a unique uri for where the data is. -// There are no restrictions on how the uri is formatted since it will depend on how to interact with the store. -message Blob { - BlobMetadata metadata = 1; - string uri = 3; -} - -message BlobMetadata { - BlobType type = 1; -} - -// A simple byte array with a tag to help different parts of the system communicate about what is in the byte array. -// It's strongly advisable that consumers of this type define a unique tag and validate the tag before parsing the data. 
-message Binary { - bytes value = 1; // Serialized data (MessagePack) for supported types like Dataclass, Pydantic BaseModel, and untyped dict. - string tag = 2; // The serialization format identifier (e.g., MessagePack). Consumers must define unique tags and validate them before deserialization. -} - -// A strongly typed schema that defines the interface of data retrieved from the underlying storage medium. -message Schema { - string uri = 1; - SchemaType type = 3; -} - -// The runtime representation of a tagged union value. See `UnionType` for more details. -message Union { - Literal value = 1; - LiteralType type = 2; -} - -message StructuredDatasetMetadata { - // Bundle the type information along with the literal. - // This is here because StructuredDatasets can often be more defined at run time than at compile time. - // That is, at compile time you might only declare a task to return a pandas dataframe or a StructuredDataset, - // without any column information, but at run time, you might have that column information. - // flytekit python will copy this type information into the literal, from the type information, if not provided by - // the various plugins (encoders). - // Since this field is run time generated, it's not used for any type checking. - StructuredDatasetType structured_dataset_type = 1; -} - -message StructuredDataset { - // String location uniquely identifying where the data is. - // Should start with the storage location (e.g. s3://, gs://, bq://, etc.) - string uri = 1; - - StructuredDatasetMetadata metadata = 2; -} - -message Scalar { - oneof value { - Primitive primitive = 1; - Blob blob = 2; - Binary binary = 3; - Schema schema = 4; - Void none_type = 5; - Error error = 6; - google.protobuf.Struct generic = 7; - StructuredDataset structured_dataset = 8; - Union union = 9; - } -} - -// A simple value. This supports any level of nesting (e.g. array of array of array of Blobs) as well as simple primitives. 
-message Literal { - reserved 6, 7; - oneof value { - // A simple value. - Scalar scalar = 1; - - // A collection of literals to allow nesting. - LiteralCollection collection = 2; - - // A map of strings to literals. - LiteralMap map = 3; - - // Offloaded literal metadata - // When you deserialize the offloaded metadata, it would be of Literal and its type would be defined by LiteralType stored in offloaded_metadata. - LiteralOffloadedMetadata offloaded_metadata = 8; - } - - // A hash representing this literal. - // This is used for caching purposes. For more details refer to RFC 1893 - // (https://github.com/flyteorg/flyte/blob/master/rfc/system/1893-caching-of-offloaded-objects.md) - string hash = 4; - - // Additional metadata for literals. - map metadata = 5; -} - -// A message that contains the metadata of the offloaded data. -message LiteralOffloadedMetadata { - // The location of the offloaded core.Literal. - string uri = 1; - - // The size of the offloaded data. - uint64 size_bytes = 2; - - // The inferred literal type of the offloaded data. - LiteralType inferred_type = 3; -} - -// A collection of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field. -message LiteralCollection { - repeated Literal literals = 1; -} - -// A map of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field. -message LiteralMap { - map literals = 1; -} - -// A collection of BindingData items. -message BindingDataCollection { - repeated BindingData bindings = 1; -} - -// A map of BindingData items. -message BindingDataMap { - map bindings = 1; -} - -message UnionInfo { - LiteralType targetType = 1; -} - -// Specifies either a simple value or a reference to another output. -message BindingData { - oneof value { - // A simple scalar value. - Scalar scalar = 1; - - // A collection of binding data. This allows nesting of binding data to any number - // of levels. 
- BindingDataCollection collection = 2; - - // References an output promised by another node. - OutputReference promise = 3; - - // A map of bindings. The key is always a string. - BindingDataMap map = 4; - } - - UnionInfo union = 5; -} - -// An input/output binding of a variable to either static value or a node output. -message Binding { - // Variable name must match an input/output variable of the node. - string var = 1; - - // Data to use to bind this variable. - BindingData binding = 2; -} - -// A generic key value pair. -message KeyValuePair { - //required. - string key = 1; - - //+optional. - string value = 2; -} - -// Retry strategy associated with an executable unit. -message RetryStrategy { - // Number of retries. Retries will be consumed when the job fails with a recoverable error. - // The number of retries must be less than or equals to 10. - uint32 retries = 5; -} diff --git a/docs/api/flyteidl/flyteidl/core/metrics.proto b/docs/api/flyteidl/flyteidl/core/metrics.proto deleted file mode 100644 index 5244ff4873..0000000000 --- a/docs/api/flyteidl/flyteidl/core/metrics.proto +++ /dev/null @@ -1,50 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -import "flyteidl/core/identifier.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/struct.proto"; - -// Span represents a duration trace of Flyte execution. The id field denotes a Flyte execution entity or an operation -// which uniquely identifies the Span. The spans attribute allows this Span to be further broken down into more -// precise definitions. -message Span { - // start_time defines the instance this span began. - google.protobuf.Timestamp start_time = 1; - - // end_time defines the instance this span completed. - google.protobuf.Timestamp end_time = 2; - - oneof id { - // workflow_id is the id of the workflow execution this Span represents. 
- flyteidl.core.WorkflowExecutionIdentifier workflow_id = 3; - - // node_id is the id of the node execution this Span represents. - flyteidl.core.NodeExecutionIdentifier node_id = 4; - - // task_id is the id of the task execution this Span represents. - flyteidl.core.TaskExecutionIdentifier task_id = 5; - - // operation_id is the id of a unique operation that this Span represents. - string operation_id = 6; - } - - // spans defines a collection of Spans that breakdown this execution. - repeated Span spans = 7; -} - -// ExecutionMetrics is a collection of metrics that are collected during the execution of a Flyte task. -message ExecutionMetricResult { - // The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG. - string metric = 1; - - // The result data in prometheus range query result format - // https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats. - // This may include multiple time series, differentiated by their metric labels. - // Start time is greater of (execution attempt start, 48h ago) - // End time is lesser of (execution attempt end, now) - google.protobuf.Struct data = 2; -} \ No newline at end of file diff --git a/docs/api/flyteidl/flyteidl/core/security.proto b/docs/api/flyteidl/flyteidl/core/security.proto deleted file mode 100644 index 3aba017476..0000000000 --- a/docs/api/flyteidl/flyteidl/core/security.proto +++ /dev/null @@ -1,130 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -// Secret encapsulates information about the secret a task needs to proceed. An environment variable -// FLYTE_SECRETS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if -// secrets are passed through environment variables. 
-// FLYTE_SECRETS_DEFAULT_DIR will be passed to indicate the prefix of the path where secrets will be mounted if secrets -// are passed through file mounts. -message Secret { - enum MountType { - // Default case, indicates the client can tolerate either mounting options. - ANY = 0; - - // ENV_VAR indicates the secret needs to be mounted as an environment variable. - ENV_VAR = 1; - - // FILE indicates the secret needs to be mounted as a file. - FILE = 2; - } - - // The name of the secret group where to find the key referenced below. For K8s secrets, this should be the name of - // the v1/secret object. For Confidant, this should be the Credential name. For Vault, this should be the secret name. - // For AWS Secret Manager, this should be the name of the secret. - // +required - string group = 1; - - // The group version to fetch. This is not supported in all secret management systems. It'll be ignored for the ones - // that do not support it. - // +optional - string group_version = 2; - - // The name of the secret to mount. This has to match an existing secret in the system. It's up to the implementation - // of the secret management system to require case sensitivity. For K8s secrets, Confidant and Vault, this should - // match one of the keys inside the secret. For AWS Secret Manager, it's ignored. - // +optional - string key = 3; - - // mount_requirement is optional. Indicates where the secret has to be mounted. If provided, the execution will fail - // if the underlying key management system cannot satisfy that requirement. If not provided, the default location - // will depend on the key management system. - // +optional - MountType mount_requirement = 4; -} - -// OAuth2Client encapsulates OAuth2 Client Credentials to be used when making calls on behalf of that task. -message OAuth2Client { - // client_id is the public id for the client to use. 
The system will not perform any pre-auth validation that the - // secret requested matches the client_id indicated here. - // +required - string client_id = 1; - - // client_secret is a reference to the secret used to authenticate the OAuth2 client. - // +required - Secret client_secret = 2; -} - -// Identity encapsulates the various security identities a task can run as. It's up to the underlying plugin to pick the -// right identity for the execution environment. -message Identity { - // iam_role references the fully qualified name of Identity & Access Management role to impersonate. - string iam_role = 1; - - // k8s_service_account references a kubernetes service account to impersonate. - string k8s_service_account = 2; - - // oauth2_client references an oauth2 client. Backend plugins can use this information to impersonate the client when - // making external calls. - OAuth2Client oauth2_client = 3; - - // execution_identity references the subject who makes the execution - string execution_identity = 4; -} - -// OAuth2TokenRequest encapsulates information needed to request an OAuth2 token. -// FLYTE_TOKENS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if -// tokens are passed through environment variables. -// FLYTE_TOKENS_PATH_PREFIX will be passed to indicate the prefix of the path where secrets will be mounted if tokens -// are passed through file mounts. -message OAuth2TokenRequest { - // Type of the token requested. - enum Type { - // CLIENT_CREDENTIALS indicates a 2-legged OAuth token requested using client credentials. - CLIENT_CREDENTIALS = 0; - } - - // name indicates a unique id for the token request within this task token requests. It'll be used as a suffix for - // environment variables and as a filename for mounting tokens as files. - // +required - string name = 1; - - // type indicates the type of the request to make. Defaults to CLIENT_CREDENTIALS. 
- // +required - Type type = 2; - - // client references the client_id/secret to use to request the OAuth2 token. - // +required - OAuth2Client client = 3; - - // idp_discovery_endpoint references the discovery endpoint used to retrieve token endpoint and other related - // information. - // +optional - string idp_discovery_endpoint = 4; - - // token_endpoint references the token issuance endpoint. If idp_discovery_endpoint is not provided, this parameter is - // mandatory. - // +optional - string token_endpoint = 5; -} - -// SecurityContext holds security attributes that apply to tasks. -message SecurityContext { - // run_as encapsulates the identity a pod should run as. If the task fills in multiple fields here, it'll be up to the - // backend plugin to choose the appropriate identity for the execution engine the task will run on. - Identity run_as = 1; - - // secrets indicate the list of secrets the task needs in order to proceed. Secrets will be mounted/passed to the - // pod as it starts. If the plugin responsible for kicking of the task will not run it on a flyte cluster (e.g. AWS - // Batch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access - // to the secret) and to pass it to the remote execution engine. - repeated Secret secrets = 2; - - // tokens indicate the list of token requests the task needs in order to proceed. Tokens will be mounted/passed to the - // pod as it starts. If the plugin responsible for kicking of the task will not run it on a flyte cluster (e.g. AWS - // Batch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access - // to the secret) and to pass it to the remote execution engine. 
- repeated OAuth2TokenRequest tokens = 3; -} diff --git a/docs/api/flyteidl/flyteidl/core/tasks.proto b/docs/api/flyteidl/flyteidl/core/tasks.proto deleted file mode 100644 index 20a1fa0cbf..0000000000 --- a/docs/api/flyteidl/flyteidl/core/tasks.proto +++ /dev/null @@ -1,351 +0,0 @@ -syntax = "proto3"; - -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/interface.proto"; -import "flyteidl/core/literals.proto"; -import "flyteidl/core/security.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/struct.proto"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -// A customizable interface to convey resources requested for a container. This can be interpreted differently for different -// container engines. -message Resources { - // Known resource names. - enum ResourceName { - UNKNOWN = 0; - CPU = 1; - GPU = 2; - MEMORY = 3; - STORAGE = 4; - // For Kubernetes-based deployments, pods use ephemeral local storage for scratch space, caching, and for logs. - EPHEMERAL_STORAGE = 5; - } - - // Encapsulates a resource name and value. - message ResourceEntry { - // Resource name. - ResourceName name = 1; - - // Value must be a valid k8s quantity. See - // https://github.com/kubernetes/apimachinery/blob/master/pkg/api/resource/quantity.go#L30-L80 - string value = 2; - } - - // The desired set of resources requested. ResourceNames must be unique within the list. - repeated ResourceEntry requests = 1; - - // Defines a set of bounds (e.g. min/max) within which the task can reliably run. ResourceNames must be unique - // within the list. - repeated ResourceEntry limits = 2; -} - -// Metadata associated with the GPU accelerator to allocate to a task. Contains -// information about device type, and for multi-instance GPUs, the partition size to -// use. 
-message GPUAccelerator { - // This can be any arbitrary string, and should be informed by the labels or taints - // associated with the nodes in question. Default cloud provider labels typically - // use the following values: `nvidia-tesla-t4`, `nvidia-tesla-a100`, etc. - string device = 1; - oneof partition_size_value { - bool unpartitioned = 2; - // Like `device`, this can be any arbitrary string, and should be informed by - // the labels or taints associated with the nodes in question. Default cloud - // provider labels typically use the following values: `1g.5gb`, `2g.10gb`, etc. - string partition_size = 3; - } -} - -// Encapsulates all non-standard resources, not captured by v1.ResourceRequirements, to -// allocate to a task. -message ExtendedResources { - // GPU accelerator to select for task. Contains information about device type, and - // for multi-instance GPUs, the partition size to use. - GPUAccelerator gpu_accelerator = 1; -} - -// Runtime information. This is loosely defined to allow for extensibility. -message RuntimeMetadata { - enum RuntimeType { - OTHER = 0; - FLYTE_SDK = 1; - } - - // Type of runtime. - RuntimeType type = 1; - - // Version of the runtime. All versions should be backward compatible. However, certain cases call for version - // checks to ensure tighter validation or setting expectations. - string version = 2; - - //+optional It can be used to provide extra information about the runtime (e.g. python, golang... etc.). - string flavor = 3; -} - -// Task Metadata -message TaskMetadata { - // Indicates whether the system should attempt to lookup this task's output to avoid duplication of work. - bool discoverable = 1; - - // Runtime information about the task. - RuntimeMetadata runtime = 2; - - // The overall timeout of a task including user-triggered retries. - google.protobuf.Duration timeout = 4; - - // Number of retries per task. 
- RetryStrategy retries = 5; - - // Indicates a logical version to apply to this task for the purpose of discovery. - string discovery_version = 6; - - // If set, this indicates that this task is deprecated. This will enable owners of tasks to notify consumers - // of the ending of support for a given task. - string deprecated_error_message = 7; - - // For interruptible we will populate it at the node level but require it be part of TaskMetadata - // for a user to set the value. - // We are using oneof instead of bool because otherwise we would be unable to distinguish between value being - // set by the user or defaulting to false. - // The logic of handling precedence will be done as part of flytepropeller. - - // Identify whether task is interruptible - oneof interruptible_value { - bool interruptible = 8; - }; - - // Indicates whether the system should attempt to execute discoverable instances in serial to avoid duplicate work - bool cache_serializable = 9; - - // Indicates whether the task will generate a Deck URI when it finishes executing. - bool generates_deck = 10; - - // Arbitrary tags that allow users and the platform to store small but arbitrary labels - map tags = 11; - - // pod_template_name is the unique name of a PodTemplate k8s resource to be used as the base configuration if this - // task creates a k8s Pod. If this value is set, the specified PodTemplate will be used instead of, but applied - // identically as, the default PodTemplate configured in FlytePropeller. - string pod_template_name = 12; - - // cache_ignore_input_vars is the input variables that should not be included when calculating hash for cache. - repeated string cache_ignore_input_vars = 13; -} - -// A Task structure that uniquely identifies a task in the system -// Tasks are registered as a first step in the system. -message TaskTemplate { - // Auto generated taskId by the system. Task Id uniquely identifies this task globally. 
- Identifier id = 1; - - // A predefined yet extensible Task type identifier. This can be used to customize any of the components. If no - // extensions are provided in the system, Flyte will resolve the this task to its TaskCategory and default the - // implementation registered for the TaskCategory. - string type = 2; - - // Extra metadata about the task. - TaskMetadata metadata = 3; - - // A strongly typed interface for the task. This enables others to use this task within a workflow and guarantees - // compile-time validation of the workflow to avoid costly runtime failures. - TypedInterface interface = 4; - - // Custom data about the task. This is extensible to allow various plugins in the system. - google.protobuf.Struct custom = 5; - - // Known target types that the system will guarantee plugins for. Custom SDK plugins are allowed to set these if needed. - // If no corresponding execution-layer plugins are found, the system will default to handling these using built-in - // handlers. - oneof target { - Container container = 6; - K8sPod k8s_pod = 17; - Sql sql = 18; - } - - // This can be used to customize task handling at execution time for the same task type. - int32 task_type_version = 7; - - // security_context encapsulates security attributes requested to run this task. - SecurityContext security_context = 8; - - // Encapsulates all non-standard resources, not captured by - // v1.ResourceRequirements, to allocate to a task. - ExtendedResources extended_resources = 9; - - // Metadata about the custom defined for this task. This is extensible to allow various plugins in the system - // to use as required. - // reserve the field numbers 1 through 15 for very frequently occurring message elements - map config = 16; -} - -// ----------------- First class Plugins - -// Defines port properties for a container. -message ContainerPort { - // Number of port to expose on the pod's IP address. - // This must be a valid port number, 0 < x < 65536. 
- uint32 container_port = 1; -} - -message Container { - // Container image url. Eg: docker/redis:latest - string image = 1; - - // Command to be executed, if not provided, the default entrypoint in the container image will be used. - repeated string command = 2; - - // These will default to Flyte given paths. If provided, the system will not append known paths. If the task still - // needs flyte's inputs and outputs path, add $(FLYTE_INPUT_FILE), $(FLYTE_OUTPUT_FILE) wherever makes sense and the - // system will populate these before executing the container. - repeated string args = 3; - - // Container resources requirement as specified by the container engine. - Resources resources = 4; - - // Environment variables will be set as the container is starting up. - repeated KeyValuePair env = 5; - - // Allows extra configs to be available for the container. - // TODO: elaborate on how configs will become available. - // Deprecated, please use TaskTemplate.config instead. - repeated KeyValuePair config = 6 [deprecated = true]; - - // Ports to open in the container. This feature is not supported by all execution engines. (e.g. supported on K8s but - // not supported on AWS Batch) - // Only K8s - repeated ContainerPort ports = 7; - - // BETA: Optional configuration for DataLoading. If not specified, then default values are used. - // This makes it possible to to run a completely portable container, that uses inputs and outputs - // only from the local file-system and without having any reference to flyteidl. This is supported only on K8s at the moment. - // If data loading is enabled, then data will be mounted in accompanying directories specified in the DataLoadingConfig. If the directories - // are not specified, inputs will be mounted onto and outputs will be uploaded from a pre-determined file-system path. Refer to the documentation - // to understand the default paths. 
- // Only K8s - DataLoadingConfig data_config = 9; - - // Architecture-type the container image supports. - enum Architecture { - UNKNOWN = 0; - AMD64 = 1; - ARM64 = 2; - ARM_V6 = 3; - ARM_V7 = 4; - } - Architecture architecture = 10; -} - -// Strategy to use when dealing with Blob, Schema, or multipart blob data (large datasets) -message IOStrategy { - // Mode to use for downloading - enum DownloadMode { - // All data will be downloaded before the main container is executed - DOWNLOAD_EAGER = 0; - // Data will be downloaded as a stream and an End-Of-Stream marker will be written to indicate all data has been downloaded. Refer to protocol for details - DOWNLOAD_STREAM = 1; - // Large objects (offloaded) will not be downloaded - DO_NOT_DOWNLOAD = 2; - } - // Mode to use for uploading - enum UploadMode { - // All data will be uploaded after the main container exits - UPLOAD_ON_EXIT = 0; - // Data will be uploaded as it appears. Refer to protocol specification for details - UPLOAD_EAGER = 1; - // Data will not be uploaded, only references will be written - DO_NOT_UPLOAD = 2; - } - // Mode to use to manage downloads - DownloadMode download_mode = 1; - // Mode to use to manage uploads - UploadMode upload_mode = 2; -} - -// This configuration allows executing raw containers in Flyte using the Flyte CoPilot system. -// Flyte CoPilot, eliminates the needs of flytekit or sdk inside the container. Any inputs required by the users container are side-loaded in the input_path -// Any outputs generated by the user container - within output_path are automatically uploaded. -message DataLoadingConfig { - // LiteralMapFormat decides the encoding format in which the input metadata should be made available to the containers. - // If the user has access to the protocol buffer definitions, it is recommended to use the PROTO format. 
- // JSON and YAML do not need any protobuf definitions to read it - // All remote references in core.LiteralMap are replaced with local filesystem references (the data is downloaded to local filesystem) - enum LiteralMapFormat { - // JSON / YAML for the metadata (which contains inlined primitive values). The representation is inline with the standard json specification as specified - https://www.json.org/json-en.html - JSON = 0; - YAML = 1; - // Proto is a serialized binary of `core.LiteralMap` defined in flyteidl/core - PROTO = 2; - } - // Flag enables DataLoading Config. If this is not set, data loading will not be used! - bool enabled = 1; - // File system path (start at root). This folder will contain all the inputs exploded to a separate file. - // Example, if the input interface needs (x: int, y: blob, z: multipart_blob) and the input path is '/var/flyte/inputs', then the file system will look like - // /var/flyte/inputs/inputs. .pb .json .yaml> -> Format as defined previously. The Blob and Multipart blob will reference local filesystem instead of remote locations - // /var/flyte/inputs/x -> X is a file that contains the value of x (integer) in string format - // /var/flyte/inputs/y -> Y is a file in Binary format - // /var/flyte/inputs/z/... -> Note Z itself is a directory - // More information about the protocol - refer to docs #TODO reference docs here - string input_path = 2; - // File system path (start at root). This folder should contain all the outputs for the task as individual files and/or an error text file - string output_path = 3; - // In the inputs folder, there will be an additional summary/metadata file that contains references to all files or inlined primitive values. - // This format decides the actual encoding for the data. 
Refer to the encoding to understand the specifics of the contents and the encoding - LiteralMapFormat format = 4; - IOStrategy io_strategy = 5; -} - -// Defines a pod spec and additional pod metadata that is created when a task is executed. -message K8sPod { - // Contains additional metadata for building a kubernetes pod. - K8sObjectMetadata metadata = 1; - - // Defines the primary pod spec created when a task is executed. - // This should be a JSON-marshalled pod spec, which can be defined in - // - go, using: https://github.com/kubernetes/api/blob/release-1.21/core/v1/types.go#L2936 - // - python: using https://github.com/kubernetes-client/python/blob/release-19.0/kubernetes/client/models/v1_pod_spec.py - google.protobuf.Struct pod_spec = 2; - - // BETA: Optional configuration for DataLoading. If not specified, then default values are used. - // This makes it possible to to run a completely portable container, that uses inputs and outputs - // only from the local file-system and without having any reference to flytekit. This is supported only on K8s at the moment. - // If data loading is enabled, then data will be mounted in accompanying directories specified in the DataLoadingConfig. If the directories - // are not specified, inputs will be mounted onto and outputs will be uploaded from a pre-determined file-system path. Refer to the documentation - // to understand the default paths. - // Only K8s - DataLoadingConfig data_config = 3; -} - -// Metadata for building a kubernetes object when a task is executed. -message K8sObjectMetadata { - // Optional labels to add to the pod definition. - map labels = 1; - - // Optional annotations to add to the pod definition. - map annotations = 2; -} - -// Sql represents a generic sql workload with a statement and dialect. -message Sql { - // The actual query to run, the query can have templated parameters. - // We use Flyte's Golang templating format for Query templating. 
- // For example, - // insert overwrite directory '{{ .rawOutputDataPrefix }}' stored as parquet - // select * - // from my_table - // where ds = '{{ .Inputs.ds }}' - string statement = 1; - // The dialect of the SQL statement. This is used to validate and parse SQL statements at compilation time to avoid - // expensive runtime operations. If set to an unsupported dialect, no validation will be done on the statement. - // We support the following dialect: ansi, hive. - enum Dialect { - UNDEFINED = 0; - ANSI = 1; - HIVE = 2; - OTHER = 3; - } - Dialect dialect = 2; -} diff --git a/docs/api/flyteidl/flyteidl/core/types.proto b/docs/api/flyteidl/flyteidl/core/types.proto deleted file mode 100644 index 3580eea9f0..0000000000 --- a/docs/api/flyteidl/flyteidl/core/types.proto +++ /dev/null @@ -1,208 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -import "google/protobuf/struct.proto"; - -// Define a set of simple types. -enum SimpleType { - NONE = 0; - INTEGER = 1; - FLOAT = 2; - STRING = 3; - BOOLEAN = 4; - DATETIME = 5; - DURATION = 6; - BINARY = 7; - ERROR = 8; - STRUCT = 9; -} - -// Defines schema columns and types to strongly type-validate schemas interoperability. -message SchemaType { - message SchemaColumn { - // A unique name -within the schema type- for the column - string name = 1; - - enum SchemaColumnType { - INTEGER = 0; - FLOAT = 1; - STRING = 2; - BOOLEAN = 3; - DATETIME = 4; - DURATION = 5; - } - - // The column type. This allows a limited set of types currently. - SchemaColumnType type = 2; - } - - // A list of ordered columns this schema comprises of. - repeated SchemaColumn columns = 3; -} - -message StructuredDatasetType { - message DatasetColumn { - // A unique name within the schema type for the column. - string name = 1; - - // The column type. - LiteralType literal_type = 2; - } - - // A list of ordered columns this schema comprises of. 
- repeated DatasetColumn columns = 1; - - // This is the storage format, the format of the bits at rest - // parquet, feather, csv, etc. - // For two types to be compatible, the format will need to be an exact match. - string format = 2; - - // This is a string representing the type that the bytes in external_schema_bytes are formatted in. - // This is an optional field that will not be used for type checking. - string external_schema_type = 3; - - // The serialized bytes of a third-party schema library like Arrow. - // This is an optional field that will not be used for type checking. - bytes external_schema_bytes = 4; -} - -// Defines type behavior for blob objects -message BlobType { - enum BlobDimensionality { - SINGLE = 0; - MULTIPART = 1; - } - - // Format can be a free form string understood by SDK/UI etc like - // csv, parquet etc - string format = 1; - BlobDimensionality dimensionality = 2; -} - -// Enables declaring enum types, with predefined string values -// For len(values) > 0, the first value in the ordered list is regarded as the default value. If you wish -// To provide no defaults, make the first value as undefined. -message EnumType { - // Predefined set of enum values. - repeated string values = 1; -} - -// Defines a tagged union type, also known as a variant (and formally as the sum type). -// -// A sum type S is defined by a sequence of types (A, B, C, ...), each tagged by a string tag -// A value of type S is constructed from a value of any of the variant types. The specific choice of type is recorded by -// storing the varaint's tag with the literal value and can be examined in runtime. -// -// Type S is typically written as -// S := Apple A | Banana B | Cantaloupe C | ... 
-// -// Notably, a nullable (optional) type is a sum type between some type X and the singleton type representing a null-value: -// Optional X := X | Null -// -// See also: https://en.wikipedia.org/wiki/Tagged_union -message UnionType { - // Predefined set of variants in union. - repeated LiteralType variants = 1; -} - -// Hints to improve type matching -// e.g. allows distinguishing output from custom type transformers -// even if the underlying IDL serialization matches. -message TypeStructure { - // Must exactly match for types to be castable - string tag = 1; - // dataclass_type only exists for dataclasses. - // This is used to resolve the type of the fields of dataclass - // The key is the field name, and the value is the literal type of the field - // e.g. For dataclass Foo, with fields a, and a is a string - // Foo.a will be resolved as a literal type of string from dataclass_type - map dataclass_type = 2; -} - -// TypeAnnotation encapsulates registration time information about a type. This can be used for various control-plane operations. TypeAnnotation will not be available at runtime when a task runs. -message TypeAnnotation { - // A arbitrary JSON payload to describe a type. - google.protobuf.Struct annotations = 1; -} - -// Defines a strong type to allow type checking between interfaces. -message LiteralType { - oneof type { - // A simple type that can be compared one-to-one with another. - SimpleType simple = 1; - - // A complex type that requires matching of inner fields. - SchemaType schema = 2; - - // Defines the type of the value of a collection. Only homogeneous collections are allowed. - LiteralType collection_type = 3; - - // Defines the type of the value of a map type. The type of the key is always a string. - LiteralType map_value_type = 4; - - // A blob might have specialized implementation details depending on associated metadata. - BlobType blob = 5; - - // Defines an enum with pre-defined string values. 
- EnumType enum_type = 7; - - // Generalized schema support - StructuredDatasetType structured_dataset_type = 8; - - // Defines an union type with pre-defined LiteralTypes. - UnionType union_type = 10; - } - - // This field contains type metadata that is descriptive of the type, but is NOT considered in type-checking. This might be used by - // consumers to identify special behavior or display extended information for the type. - google.protobuf.Struct metadata = 6; - - // This field contains arbitrary data that might have special semantic - // meaning for the client but does not effect internal flyte behavior. - TypeAnnotation annotation = 9; - - // Hints to improve type matching. - TypeStructure structure = 11; -} - -// A reference to an output produced by a node. The type can be retrieved -and validated- from -// the underlying interface of the node. -message OutputReference { - // Node id must exist at the graph layer. - string node_id = 1; - - // Variable name must refer to an output variable for the node. - string var = 2; - - repeated PromiseAttribute attr_path = 3; -} - -// PromiseAttribute stores the attribute path of a promise, which will be resolved at runtime. -// The attribute path is a list of strings and integers. -// In the following example, -// ``` -// @workflow -// def wf(): -// o = t1() -// t2(o.a["b"][0]) -// ``` -// the output reference t2 binds to has a list of PromiseAttribute ["a", "b", 0] - -message PromiseAttribute { - oneof value { - string string_value = 1; - int32 int_value = 2; - } -} - -// Represents an error thrown from a node. -message Error { - // The node id that threw the error. - string failed_node_id = 1; - - // Error message thrown. 
- string message = 2; -} diff --git a/docs/api/flyteidl/flyteidl/core/workflow.proto b/docs/api/flyteidl/flyteidl/core/workflow.proto deleted file mode 100644 index 3df4b2422f..0000000000 --- a/docs/api/flyteidl/flyteidl/core/workflow.proto +++ /dev/null @@ -1,331 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -import "flyteidl/core/condition.proto"; -import "flyteidl/core/execution.proto"; -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/interface.proto"; -import "flyteidl/core/literals.proto"; -import "flyteidl/core/tasks.proto"; -import "flyteidl/core/types.proto"; -import "flyteidl/core/security.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/wrappers.proto"; - -// Defines a condition and the execution unit that should be executed if the condition is satisfied. -message IfBlock { - core.BooleanExpression condition = 1; - Node then_node = 2; -} - -// Defines a series of if/else blocks. The first branch whose condition evaluates to true is the one to execute. -// If no conditions were satisfied, the else_node or the error will execute. -message IfElseBlock { - //+required. First condition to evaluate. - IfBlock case = 1; - - //+optional. Additional branches to evaluate. - repeated IfBlock other = 2; - - //+required. - oneof default { - // The node to execute in case none of the branches were taken. - Node else_node = 3; - - // An error to throw in case none of the branches were taken. - Error error = 4; - } -} - -// BranchNode is a special node that alter the flow of the workflow graph. It allows the control flow to branch at -// runtime based on a series of conditions that get evaluated on various parameters (e.g. inputs, primitives). -message BranchNode { - //+required - IfElseBlock if_else = 1; -} - -// Refers to the task that the Node is to execute. 
-message TaskNode { - oneof reference { - // A globally unique identifier for the task. - Identifier reference_id = 1; - } - - // Optional overrides applied at task execution time. - TaskNodeOverrides overrides = 2; -} - -// Refers to a the workflow the node is to execute. -message WorkflowNode { - oneof reference { - // A globally unique identifier for the launch plan. - Identifier launchplan_ref = 1; - - // Reference to a subworkflow, that should be defined with the compiler context - Identifier sub_workflow_ref = 2; - } -} - -// ApproveCondition represents a dependency on an external approval. During execution, this will manifest as a boolean -// signal with the provided signal_id. -message ApproveCondition { - // A unique identifier for the requested boolean signal. - string signal_id = 1; -} - -// SignalCondition represents a dependency on an signal. -message SignalCondition { - // A unique identifier for the requested signal. - string signal_id = 1; - - // A type denoting the required value type for this signal. - LiteralType type = 2; - - // The variable name for the signal value in this nodes outputs. - string output_variable_name = 3; -} - -// SleepCondition represents a dependency on waiting for the specified duration. -message SleepCondition { - // The overall duration for this sleep. - google.protobuf.Duration duration = 1; -} - -// GateNode refers to the condition that is required for the gate to successfully complete. -message GateNode { - oneof condition { - // ApproveCondition represents a dependency on an external approval provided by a boolean signal. - ApproveCondition approve = 1; - - // SignalCondition represents a dependency on an signal. - SignalCondition signal = 2; - - // SleepCondition represents a dependency on waiting for the specified duration. - SleepCondition sleep = 3; - } -} - -// ArrayNode is a Flyte node type that simplifies the execution of a sub-node over a list of input -// values. 
An ArrayNode can be executed with configurable parallelism (separate from the parent -// workflow) and can be configured to succeed when a certain number of sub-nodes succeed. -message ArrayNode { - // node is the sub-node that will be executed for each element in the array. - Node node = 1; - - oneof parallelism_option { - // parallelism defines the minimum number of instances to bring up concurrently at any given - // point. Note that this is an optimistic restriction and that, due to network partitioning or - // other failures, the actual number of currently running instances might be more. This has to - // be a positive number if assigned. Default value is size. - uint32 parallelism = 2; - } - - oneof success_criteria { - // min_successes is an absolute number of the minimum number of successful completions of - // sub-nodes. As soon as this criteria is met, the ArrayNode will be marked as successful - // and outputs will be computed. This has to be a non-negative number if assigned. Default - // value is size (if specified). - uint32 min_successes = 3; - - // If the array job size is not known beforehand, the min_success_ratio can instead be used - // to determine when an ArrayNode can be marked successful. - float min_success_ratio = 4; - } - - enum ExecutionMode { - // Indicates the ArrayNode will store minimal state for the sub-nodes. - // This is more efficient, but only supports a subset of Flyte entities. - MINIMAL_STATE = 0; - - // Indicates the ArrayNode will store full state for the sub-nodes. - // This supports a wider range of Flyte entities. - FULL_STATE = 1; - } - - // execution_mode determines the execution path for ArrayNode. - ExecutionMode execution_mode = 5; -} - -// Defines extra information about the Node. -message NodeMetadata { - // A friendly name for the Node - string name = 1; - - // The overall timeout of a task. - google.protobuf.Duration timeout = 4; - - // Number of retries per task. 
- RetryStrategy retries = 5; - - // Identify whether node is interruptible - oneof interruptible_value { - bool interruptible = 6; - }; - - // Identify whether a node should have it's outputs cached. - oneof cacheable_value { - bool cacheable = 7; - } - - // The version of the cache to use. - oneof cache_version_value { - string cache_version = 8; - } - - // Identify whether caching operations involving this node should be serialized. - oneof cache_serializable_value { - bool cache_serializable = 9; - } -} - -// Links a variable to an alias. -message Alias { - // Must match one of the output variable names on a node. - string var = 1; - - // A workflow-level unique alias that downstream nodes can refer to in their input. - string alias = 2; -} - -// A Workflow graph Node. One unit of execution in the graph. Each node can be linked to a Task, a Workflow or a branch -// node. -message Node { - // A workflow-level unique identifier that identifies this node in the workflow. 'inputs' and 'outputs' are reserved - // node ids that cannot be used by other nodes. - string id = 1; - - // Extra metadata about the node. - NodeMetadata metadata = 2; - - // Specifies how to bind the underlying interface's inputs. All required inputs specified in the underlying interface - // must be fulfilled. - repeated Binding inputs = 3; - - //+optional Specifies execution dependency for this node ensuring it will only get scheduled to run after all its - // upstream nodes have completed. This node will have an implicit dependency on any node that appears in inputs - // field. - repeated string upstream_node_ids = 4; - - //+optional. A node can define aliases for a subset of its outputs. This is particularly useful if different nodes - // need to conform to the same interface (e.g. all branches in a branch node). Downstream nodes must refer to this - // nodes outputs using the alias if one's specified. 
- repeated Alias output_aliases = 5; - - // Information about the target to execute in this node. - oneof target { - // Information about the Task to execute in this node. - TaskNode task_node = 6; - - // Information about the Workflow to execute in this mode. - WorkflowNode workflow_node = 7; - - // Information about the branch node to evaluate in this node. - BranchNode branch_node = 8; - - // Information about the condition to evaluate in this node. - GateNode gate_node = 9; - - // Information about the sub-node executions for each value in the list of this nodes - // inputs values. - ArrayNode array_node = 10; - } -} - -// This is workflow layer metadata. These settings are only applicable to the workflow as a whole, and do not -// percolate down to child entities (like tasks) launched by the workflow. -message WorkflowMetadata { - // Indicates the runtime priority of workflow executions. - QualityOfService quality_of_service = 1; - - // Failure Handling Strategy - enum OnFailurePolicy { - // FAIL_IMMEDIATELY instructs the system to fail as soon as a node fails in the workflow. It'll automatically - // abort all currently running nodes and clean up resources before finally marking the workflow executions as - // failed. - FAIL_IMMEDIATELY = 0; - - // FAIL_AFTER_EXECUTABLE_NODES_COMPLETE instructs the system to make as much progress as it can. The system will - // not alter the dependencies of the execution graph so any node that depend on the failed node will not be run. - // Other nodes that will be executed to completion before cleaning up resources and marking the workflow - // execution as failed. - FAIL_AFTER_EXECUTABLE_NODES_COMPLETE = 1; - } - - // Defines how the system should behave when a failure is detected in the workflow execution. 
- OnFailurePolicy on_failure = 2; - - // Arbitrary tags that allow users and the platform to store small but arbitrary labels - map tags = 3; -} - -// The difference between these settings and the WorkflowMetadata ones is that these are meant to be passed down to -// a workflow's underlying entities (like tasks). For instance, 'interruptible' has no meaning at the workflow layer, it -// is only relevant when a task executes. The settings here are the defaults that are passed to all nodes -// unless explicitly overridden at the node layer. -// If you are adding a setting that applies to both the Workflow itself, and everything underneath it, it should be -// added to both this object and the WorkflowMetadata object above. -message WorkflowMetadataDefaults { - // Whether child nodes of the workflow are interruptible. - bool interruptible = 1; -} - -// Flyte Workflow Structure that encapsulates task, branch and subworkflow nodes to form a statically analyzable, -// directed acyclic graph. -message WorkflowTemplate { - // A globally unique identifier for the workflow. - Identifier id = 1; - - // Extra metadata about the workflow. - WorkflowMetadata metadata = 2; - - // Defines a strongly typed interface for the Workflow. This can include some optional parameters. - TypedInterface interface = 3; - - // A list of nodes. In addition, 'globals' is a special reserved node id that can be used to consume workflow inputs. - repeated Node nodes = 4; - - // A list of output bindings that specify how to construct workflow outputs. Bindings can pull node outputs or - // specify literals. All workflow outputs specified in the interface field must be bound in order for the workflow - // to be validated. A workflow has an implicit dependency on all of its nodes to execute successfully in order to - // bind final outputs. - // Most of these outputs will be Binding's with a BindingData of type OutputReference. 
That is, your workflow can - // just have an output of some constant (`Output(5)`), but usually, the workflow will be pulling - // outputs from the output of a task. - repeated Binding outputs = 5; - - //+optional A catch-all node. This node is executed whenever the execution engine determines the workflow has failed. - // The interface of this node must match the Workflow interface with an additional input named 'error' of type - // pb.lyft.flyte.core.Error. - Node failure_node = 6; - - // workflow defaults - WorkflowMetadataDefaults metadata_defaults = 7; -} - -// Optional task node overrides that will be applied at task execution time. -message TaskNodeOverrides { - // A customizable interface to convey resources requested for a task container. - Resources resources = 1; - - // Overrides for all non-standard resources, not captured by - // v1.ResourceRequirements, to allocate to a task. - ExtendedResources extended_resources = 2; - - // Override for the image used by task pods. - string container_image = 3; -} - -// A structure that uniquely identifies a launch plan in the system. -message LaunchPlanTemplate { - // A globally unique identifier for the launch plan. - Identifier id = 1; - - // The input and output interface for the launch plan - TypedInterface interface = 2; - - // A collection of input literals that are fixed for the launch plan - LiteralMap fixed_inputs = 3; -} diff --git a/docs/api/flyteidl/flyteidl/core/workflow_closure.proto b/docs/api/flyteidl/flyteidl/core/workflow_closure.proto deleted file mode 100644 index c8ee990036..0000000000 --- a/docs/api/flyteidl/flyteidl/core/workflow_closure.proto +++ /dev/null @@ -1,18 +0,0 @@ -syntax = "proto3"; - -package flyteidl.core; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; - -import "flyteidl/core/workflow.proto"; -import "flyteidl/core/tasks.proto"; - -// Defines an enclosed package of workflow and tasks it references. -message WorkflowClosure { - //required. 
Workflow template. - WorkflowTemplate workflow = 1; - - //optional. A collection of tasks referenced by the workflow. Only needed if the workflow - // references tasks. - repeated TaskTemplate tasks = 2; -} diff --git a/docs/api/flyteidl/flyteidl/datacatalog/datacatalog.proto b/docs/api/flyteidl/flyteidl/datacatalog/datacatalog.proto deleted file mode 100644 index e296603113..0000000000 --- a/docs/api/flyteidl/flyteidl/datacatalog/datacatalog.proto +++ /dev/null @@ -1,420 +0,0 @@ -syntax = "proto3"; - -package datacatalog; - -import "flyteidl/core/literals.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/datacatalog"; - -/* - * Data Catalog service definition - * Data Catalog is a service for indexing parameterized, strongly-typed data artifacts across revisions. - * Artifacts are associated with a Dataset, and can be tagged for retrieval. - */ -service DataCatalog { - // Create a new Dataset. Datasets are unique based on the DatasetID. Datasets are logical groupings of artifacts. - // Each dataset can have one or more artifacts - rpc CreateDataset (CreateDatasetRequest) returns (CreateDatasetResponse); - - // Get a Dataset by the DatasetID. This returns the Dataset with the associated metadata. - rpc GetDataset (GetDatasetRequest) returns (GetDatasetResponse); - - // Create an artifact and the artifact data associated with it. An artifact can be a hive partition or arbitrary - // files or data values - rpc CreateArtifact (CreateArtifactRequest) returns (CreateArtifactResponse); - - // Retrieve an artifact by an identifying handle. This returns an artifact along with the artifact data. - rpc GetArtifact (GetArtifactRequest) returns (GetArtifactResponse); - - // Associate a tag with an artifact. Tags are unique within a Dataset. 
- rpc AddTag (AddTagRequest) returns (AddTagResponse); - - // Return a paginated list of artifacts - rpc ListArtifacts (ListArtifactsRequest) returns (ListArtifactsResponse); - - // Return a paginated list of datasets - rpc ListDatasets (ListDatasetsRequest) returns (ListDatasetsResponse); - - // Updates an existing artifact, overwriting the stored artifact data in the underlying blob storage. - rpc UpdateArtifact (UpdateArtifactRequest) returns (UpdateArtifactResponse); - - // Attempts to get or extend a reservation for the corresponding artifact. If one already exists - // (ie. another entity owns the reservation) then that reservation is retrieved. - // Once you acquire a reservation, you need to periodically extend the reservation with an - // identical call. If the reservation is not extended before the defined expiration, it may be - // acquired by another task. - // Note: We may have multiple concurrent tasks with the same signature and the same input that - // try to populate the same artifact at the same time. Thus with reservation, only one task can - // run at a time, until the reservation expires. - // Note: If task A does not extend the reservation in time and the reservation expires, another - // task B may take over the reservation, resulting in two tasks A and B running in parallel. So - // a third task C may get the Artifact from A or B, whichever writes last. - rpc GetOrExtendReservation (GetOrExtendReservationRequest) returns (GetOrExtendReservationResponse); - - // Release the reservation when the task holding the spot fails so that the other tasks - // can grab the spot. - rpc ReleaseReservation (ReleaseReservationRequest) returns (ReleaseReservationResponse); -} - -/* - * Request message for creating a Dataset. - */ -message CreateDatasetRequest { - Dataset dataset = 1; -} - -/* - * Response message for creating a Dataset - */ -message CreateDatasetResponse { - -} - -/* - * Request message for retrieving a Dataset. 
The Dataset is retrieved by it's unique identifier - * which is a combination of several fields. - */ -message GetDatasetRequest { - DatasetID dataset = 1; -} - -/* - * Response message for retrieving a Dataset. The response will include the metadata for the - * Dataset. - */ -message GetDatasetResponse { - Dataset dataset = 1; -} - -/* - * Request message for retrieving an Artifact. Retrieve an artifact based on a query handle that - * can be one of artifact_id or tag. The result returned will include the artifact data and metadata - * associated with the artifact. - */ -message GetArtifactRequest { - DatasetID dataset = 1; - - oneof query_handle { - string artifact_id = 2; - string tag_name = 3; - } -} - -/* - * Response message for retrieving an Artifact. The result returned will include the artifact data - * and metadata associated with the artifact. - */ -message GetArtifactResponse { - Artifact artifact = 1; -} - -/* - * Request message for creating an Artifact and its associated artifact Data. - */ -message CreateArtifactRequest { - Artifact artifact = 1; -} - -/* - * Response message for creating an Artifact. - */ -message CreateArtifactResponse { - -} - -/* - * Request message for tagging an Artifact. - */ -message AddTagRequest { - Tag tag = 1; -} - -/* - * Response message for tagging an Artifact. - */ -message AddTagResponse { - -} - -// List the artifacts that belong to the Dataset, optionally filtered using filtered expression. 
-message ListArtifactsRequest { - // Use a datasetID for which you want to retrieve the artifacts - DatasetID dataset = 1; - - // Apply the filter expression to this query - FilterExpression filter = 2; - // Pagination options to get a page of artifacts - PaginationOptions pagination = 3; -} - -// Response to list artifacts -message ListArtifactsResponse { - // The list of artifacts - repeated Artifact artifacts = 1; - // Token to use to request the next page, pass this into the next requests PaginationOptions - string next_token = 2; -} - -// List the datasets for the given query -message ListDatasetsRequest { - // Apply the filter expression to this query - FilterExpression filter = 1; - // Pagination options to get a page of datasets - PaginationOptions pagination = 2; -} - -// List the datasets response with token for next pagination -message ListDatasetsResponse { - // The list of datasets - repeated Dataset datasets = 1; - // Token to use to request the next page, pass this into the next requests PaginationOptions - string next_token = 2; -} - -/* - * Request message for updating an Artifact and overwriting its associated ArtifactData. - */ -message UpdateArtifactRequest { - // ID of dataset the artifact is associated with - DatasetID dataset = 1; - - // Either ID of artifact or name of tag to retrieve existing artifact from - oneof query_handle { - string artifact_id = 2; - string tag_name = 3; - } - - // List of data to overwrite stored artifact data with. Must contain ALL data for updated Artifact as any missing - // ArtifactData entries will be removed from the underlying blob storage and database. - repeated ArtifactData data = 4; - - // Update execution metadata(including execution domain, name, node, project data) when overwriting cache - Metadata metadata = 5; -} - -/* - * Response message for updating an Artifact. 
- */ -message UpdateArtifactResponse { - // The unique ID of the artifact updated - string artifact_id = 1; -} - -/* - * ReservationID message that is composed of several string fields. - */ -message ReservationID { - // The unique ID for the reserved dataset - DatasetID dataset_id = 1; - - // The specific artifact tag for the reservation - string tag_name = 2; -} - -// Try to acquire or extend an artifact reservation. If an active reservation exists, retrieve that instance. -message GetOrExtendReservationRequest { - // The unique ID for the reservation - ReservationID reservation_id = 1; - - // The unique ID of the owner for the reservation - string owner_id = 2; - - // Requested reservation extension heartbeat interval - google.protobuf.Duration heartbeat_interval = 3; -} - -// A reservation including owner, heartbeat interval, expiration timestamp, and various metadata. -message Reservation { - // The unique ID for the reservation - ReservationID reservation_id = 1; - - // The unique ID of the owner for the reservation - string owner_id = 2; - - // Recommended heartbeat interval to extend reservation - google.protobuf.Duration heartbeat_interval = 3; - - // Expiration timestamp of this reservation - google.protobuf.Timestamp expires_at = 4; - - // Free-form metadata associated with the artifact - Metadata metadata = 6; -} - -// Response including either a newly minted reservation or the existing reservation -message GetOrExtendReservationResponse { - // The reservation to be acquired or extended - Reservation reservation = 1; -} - -// Request to release reservation -message ReleaseReservationRequest { - // The unique ID for the reservation - ReservationID reservation_id = 1; - - // The unique ID of the owner for the reservation - string owner_id = 2; -} - -// Response to release reservation -message ReleaseReservationResponse { - -} - -/* - * Dataset message. It is uniquely identified by DatasetID. 
- */ -message Dataset { - DatasetID id = 1; - Metadata metadata = 2; - repeated string partitionKeys = 3; -} - -/* - * An artifact could have multiple partitions and each partition can have an arbitrary string key/value pair - */ -message Partition { - string key = 1; - string value = 2; -} - -/* - * DatasetID message that is composed of several string fields. - */ -message DatasetID { - string project = 1; // The name of the project - string name = 2; // The name of the dataset - string domain = 3; // The domain (eg. environment) - string version = 4; // Version of the data schema - string UUID = 5; // UUID for the dataset (if set the above fields are optional) - - // Optional, org key applied to the resource. - string org = 6; -} - -/* - * Artifact message. It is composed of several string fields. - */ -message Artifact { - string id = 1; // The unique ID of the artifact - DatasetID dataset = 2; // The Dataset that the artifact belongs to - repeated ArtifactData data = 3; // A list of data that is associated with the artifact - Metadata metadata = 4; // Free-form metadata associated with the artifact - repeated Partition partitions = 5; - repeated Tag tags = 6; - google.protobuf.Timestamp created_at = 7; // creation timestamp of artifact, autogenerated by service -} - -/* - * ArtifactData that belongs to an artifact - */ -message ArtifactData { - string name = 1; - flyteidl.core.Literal value = 2; -} - -/* - * Tag message that is unique to a Dataset. It is associated to a single artifact and - * can be retrieved by name later. 
- */ -message Tag { - string name = 1; // Name of tag - string artifact_id = 2; // The tagged artifact - DatasetID dataset = 3; // The Dataset that this tag belongs to -} - -/* - * Metadata representation for artifacts and datasets - */ -message Metadata { - map key_map = 1; // key map is a dictionary of key/val strings that represent metadata -} - -// Filter expression that is composed of a combination of single filters -message FilterExpression { - repeated SinglePropertyFilter filters = 1; -} - -// A single property to filter on. -message SinglePropertyFilter { - oneof property_filter { - TagPropertyFilter tag_filter = 1; - PartitionPropertyFilter partition_filter = 2; - ArtifactPropertyFilter artifact_filter = 3; - DatasetPropertyFilter dataset_filter = 4; - } - - // as use-cases come up we can add more operators, ex: gte, like, not eq etc. - enum ComparisonOperator { - EQUALS = 0; - } - - ComparisonOperator operator = 10; // field 10 in case we add more entities to query - // Next field number: 11 -} - -// Artifact properties we can filter by -message ArtifactPropertyFilter { - // oneof because we can add more properties in the future - oneof property { - string artifact_id = 1; - } -} - -// Tag properties we can filter by -message TagPropertyFilter { - oneof property { - string tag_name = 1; - } -} - -// Partition properties we can filter by -message PartitionPropertyFilter { - oneof property { - KeyValuePair key_val = 1; - } -} - -message KeyValuePair { - string key = 1; - string value = 2; -} - -// Dataset properties we can filter by -message DatasetPropertyFilter { - oneof property { - string project = 1; - string name = 2; - string domain = 3; - string version = 4; - // Optional, org key applied to the dataset. 
- string org = 5; - } -} - -// Pagination options for making list requests -message PaginationOptions { - - // the max number of results to return - uint32 limit = 1; - - // the token to pass to fetch the next page - string token = 2; - - // the property that we want to sort the results by - SortKey sortKey = 3; - - // the sort order of the results - SortOrder sortOrder = 4; - - enum SortOrder { - DESCENDING = 0; - ASCENDING = 1; - } - - enum SortKey { - CREATION_TIME = 0; - } -} diff --git a/docs/api/flyteidl/flyteidl/event/cloudevents.proto b/docs/api/flyteidl/flyteidl/event/cloudevents.proto deleted file mode 100644 index d02c5ff516..0000000000 --- a/docs/api/flyteidl/flyteidl/event/cloudevents.proto +++ /dev/null @@ -1,73 +0,0 @@ -syntax = "proto3"; - -package flyteidl.event; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event"; - -import "flyteidl/event/event.proto"; -import "flyteidl/core/literals.proto"; -import "flyteidl/core/interface.proto"; -import "flyteidl/core/artifact_id.proto"; -import "flyteidl/core/identifier.proto"; -import "google/protobuf/timestamp.proto"; - -// This is the cloud event parallel to the raw WorkflowExecutionEvent message. It's filled in with additional -// information that downstream consumers may find useful. -message CloudEventWorkflowExecution { - event.WorkflowExecutionEvent raw_event = 1; - - core.TypedInterface output_interface = 2; - - // The following are ExecutionMetadata fields - // We can't have the ExecutionMetadata object directly because of import cycle - repeated core.ArtifactID artifact_ids = 3; - core.WorkflowExecutionIdentifier reference_execution = 4; - string principal = 5; - - // The ID of the LP that generated the execution that generated the Artifact. - // Here for provenance information. - // Launch plan IDs are easier to get than workflow IDs so we'll use these for now. 
- core.Identifier launch_plan_id = 6; -} - -message CloudEventNodeExecution { - event.NodeExecutionEvent raw_event = 1; - - // The relevant task execution if applicable - core.TaskExecutionIdentifier task_exec_id = 2; - - // The typed interface for the task that produced the event. - core.TypedInterface output_interface = 3; - - // The following are ExecutionMetadata fields - // We can't have the ExecutionMetadata object directly because of import cycle - repeated core.ArtifactID artifact_ids = 4; - string principal = 5; - - // The ID of the LP that generated the execution that generated the Artifact. - // Here for provenance information. - // Launch plan IDs are easier to get than workflow IDs so we'll use these for now. - core.Identifier launch_plan_id = 6; -} - -message CloudEventTaskExecution { - event.TaskExecutionEvent raw_event = 1; -} - -// This event is to be sent by Admin after it creates an execution. -message CloudEventExecutionStart { - // The execution created. - core.WorkflowExecutionIdentifier execution_id = 1; - // The launch plan used. - core.Identifier launch_plan_id = 2; - - core.Identifier workflow_id = 3; - - // Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run. - repeated core.ArtifactID artifact_ids = 4; - - // Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service. 
- repeated string artifact_trackers = 5; - - string principal = 6; -} diff --git a/docs/api/flyteidl/flyteidl/event/event.proto b/docs/api/flyteidl/flyteidl/event/event.proto deleted file mode 100644 index 640b4804e9..0000000000 --- a/docs/api/flyteidl/flyteidl/event/event.proto +++ /dev/null @@ -1,328 +0,0 @@ -syntax = "proto3"; - -package flyteidl.event; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event"; - -import "flyteidl/core/literals.proto"; -import "flyteidl/core/compiler.proto"; -import "flyteidl/core/execution.proto"; -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/catalog.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/struct.proto"; - - -message WorkflowExecutionEvent { - // Workflow execution id - core.WorkflowExecutionIdentifier execution_id = 1; - - // the id of the originator (Propeller) of the event - string producer_id = 2; - - core.WorkflowExecution.Phase phase = 3; - - // This timestamp represents when the original event occurred, it is generated - // by the executor of the workflow. - google.protobuf.Timestamp occurred_at = 4; - - oneof output_result { - // URL to the output of the execution, it encodes all the information - // including Cloud source provider. ie., s3://... - string output_uri = 5; - - // Error information for the execution - core.ExecutionError error = 6; - - // Raw output data produced by this workflow execution. - core.LiteralMap output_data = 7; - } -} - -message NodeExecutionEvent { - // Unique identifier for this node execution - core.NodeExecutionIdentifier id = 1; - - // the id of the originator (Propeller) of the event - string producer_id = 2; - - core.NodeExecution.Phase phase = 3; - - // This timestamp represents when the original event occurred, it is generated - // by the executor of the node. 
- google.protobuf.Timestamp occurred_at = 4; - - oneof input_value { - string input_uri = 5; - - // Raw input data consumed by this node execution. - core.LiteralMap input_data = 20; - } - - oneof output_result { - // URL to the output of the execution, it encodes all the information - // including Cloud source provider. ie., s3://... - string output_uri = 6; - - // Error information for the execution - core.ExecutionError error = 7; - - // Raw output data produced by this node execution. - core.LiteralMap output_data = 15; - } - - // Additional metadata to do with this event's node target based - // on the node type - oneof target_metadata { - WorkflowNodeMetadata workflow_node_metadata = 8; - TaskNodeMetadata task_node_metadata = 14; - } - - // [To be deprecated] Specifies which task (if any) launched this node. - ParentTaskExecutionMetadata parent_task_metadata = 9; - - // Specifies the parent node of the current node execution. Node executions at level zero will not have a parent node. - ParentNodeExecutionMetadata parent_node_metadata = 10; - - // Retry group to indicate grouping of nodes by retries - string retry_group = 11; - - // Identifier of the node in the original workflow/graph - // This maps to value of WorkflowTemplate.nodes[X].id - string spec_node_id = 12; - - // Friendly readable name for the node - string node_name = 13; - - int32 event_version = 16; - - // Whether this node launched a subworkflow. - bool is_parent = 17; - - // Whether this node yielded a dynamic workflow. - bool is_dynamic = 18; - - // String location uniquely identifying where the deck HTML file is - // NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar) - string deck_uri = 19; - - // This timestamp represents the instant when the event was reported by the executing framework. 
For example, - // when first processing a node the `occurred_at` timestamp should be the instant propeller makes progress, so when - // literal inputs are initially copied. The event however will not be sent until after the copy completes. - // Extracting both of these timestamps facilitates a more accurate portrayal of the evaluation time-series. - google.protobuf.Timestamp reported_at = 21; - - // Indicates if this node is an ArrayNode. - bool is_array = 22; - - // So that Admin doesn't have to rebuild the node execution graph to find the target entity, propeller will fill this - // in optionally - currently this is only filled in for subworkflows. This is the ID of the subworkflow corresponding - // to this node execution. It is difficult to find because Admin only sees one node at a time. A subworkflow could be - // nested multiple layers deep, and you'd need to access the correct workflow template to know the target subworkflow. - core.Identifier target_entity = 23; - - // Tasks and subworkflows (but not launch plans) that are run within a dynamic task are effectively independent of - // the tasks that are registered in Admin's db. Confusingly, they are often identical, but sometimes they are not - // even registered at all. Similar to the target_entity field, at the time Admin receives this event, it has no idea - // if the relevant execution entity is was registered, or dynamic. This field indicates that the target_entity ID, - // as well as task IDs in any corresponding Task Executions, should not be used to looked up the task in Admin's db. - bool is_in_dynamic_chain = 24; -} - -// For Workflow Nodes we need to send information about the workflow that's launched -message WorkflowNodeMetadata { - core.WorkflowExecutionIdentifier execution_id = 1; -} - -message TaskNodeMetadata { - // Captures the status of caching for this execution. 
- core.CatalogCacheStatus cache_status = 1; - // This structure carries the catalog artifact information - core.CatalogMetadata catalog_key = 2; - // Captures the status of cache reservations for this execution. - core.CatalogReservation.Status reservation_status = 3; - // The latest checkpoint location - string checkpoint_uri = 4; - - // In the case this task launched a dynamic workflow we capture its structure here. - DynamicWorkflowNodeMetadata dynamic_workflow = 16; -} - -// For dynamic workflow nodes we send information about the dynamic workflow definition that gets generated. -message DynamicWorkflowNodeMetadata { - // id represents the unique identifier of the workflow. - core.Identifier id = 1; - - // Represents the compiled representation of the embedded dynamic workflow. - core.CompiledWorkflowClosure compiled_workflow = 2; - - // dynamic_job_spec_uri is the location of the DynamicJobSpec proto message for this DynamicWorkflow. This is - // required to correctly recover partially completed executions where the workflow has already been compiled. - string dynamic_job_spec_uri = 3; -} - -message ParentTaskExecutionMetadata { - core.TaskExecutionIdentifier id = 1; -} - -message ParentNodeExecutionMetadata { - // Unique identifier of the parent node id within the execution - // This is value of core.NodeExecutionIdentifier.node_id of the parent node - string node_id = 1; -} - -message EventReason { - // An explanation for this event - string reason = 1; - - // The time this reason occurred - google.protobuf.Timestamp occurred_at = 2; -} - -// Plugin specific execution event information. For tasks like Python, Hive, Spark, DynamicJob. -message TaskExecutionEvent { - // ID of the task. In combination with the retryAttempt this will indicate - // the task execution uniquely for a given parent node execution. 
- core.Identifier task_id = 1; - - // A task execution is always kicked off by a node execution, the event consumer - // will use the parent_id to relate the task to it's parent node execution - core.NodeExecutionIdentifier parent_node_execution_id = 2; - - // retry attempt number for this task, ie., 2 for the second attempt - uint32 retry_attempt = 3; - - // Phase associated with the event - core.TaskExecution.Phase phase = 4; - - // id of the process that sent this event, mainly for trace debugging - string producer_id = 5; - - // log information for the task execution - repeated core.TaskLog logs = 6; - - // This timestamp represents when the original event occurred, it is generated - // by the executor of the task. - google.protobuf.Timestamp occurred_at = 7; - - oneof input_value { - // URI of the input file, it encodes all the information - // including Cloud source provider. ie., s3://... - string input_uri = 8; - - // Raw input data consumed by this task execution. - core.LiteralMap input_data = 19; - } - - oneof output_result { - // URI to the output of the execution, it will be in a format that encodes all the information - // including Cloud source provider. ie., s3://... - string output_uri = 9; - - // Error information for the execution - core.ExecutionError error = 10; - - // Raw output data produced by this task execution. - core.LiteralMap output_data = 17; - } - - // Custom data that the task plugin sends back. This is extensible to allow various plugins in the system. - google.protobuf.Struct custom_info = 11; - - // Some phases, like RUNNING, can send multiple events with changed metadata (new logs, additional custom_info, etc) - // that should be recorded regardless of the lack of phase change. - // The version field should be incremented when metadata changes across the duration of an individual phase. - uint32 phase_version = 12; - - // An optional explanation for the phase transition. - // Deprecated: Use reasons instead. 
- string reason = 13 [deprecated = true]; - - // An optional list of explanations for the phase transition. - repeated EventReason reasons = 21; - - // A predefined yet extensible Task type identifier. If the task definition is already registered in flyte admin - // this type will be identical, but not all task executions necessarily use pre-registered definitions and this - // type is useful to render the task in the UI, filter task executions, etc. - string task_type = 14; - - // Metadata around how a task was executed. - TaskExecutionMetadata metadata = 16; - - // The event version is used to indicate versioned changes in how data is reported using this - // proto message. For example, event_verison > 0 means that maps tasks report logs using the - // TaskExecutionMetadata ExternalResourceInfo fields for each subtask rather than the TaskLog - // in this message. - int32 event_version = 18; - - // This timestamp represents the instant when the event was reported by the executing framework. For example, a k8s - // pod task may be marked completed at (ie. `occurred_at`) the instant the container running user code completes, - // but this event will not be reported until the pod is marked as completed. Extracting both of these timestamps - // facilitates a more accurate portrayal of the evaluation time-series. - google.protobuf.Timestamp reported_at = 20; -} - -// This message contains metadata about external resources produced or used by a specific task execution. -message ExternalResourceInfo { - - // Identifier for an external resource created by this task execution, for example Qubole query ID or presto query ids. - string external_id = 1; - - // A unique index for the external resource with respect to all external resources for this task. Although the - // identifier may change between task reporting events or retries, this will remain the same to enable aggregating - // information from multiple reports. 
- uint32 index = 2; - - // Retry attempt number for this external resource, ie., 2 for the second attempt - uint32 retry_attempt = 3; - - // Phase associated with the external resource - core.TaskExecution.Phase phase = 4; - - // Captures the status of caching for this external resource execution. - core.CatalogCacheStatus cache_status = 5; - - // log information for the external resource execution - repeated core.TaskLog logs = 6; -} - - -// This message holds task execution metadata specific to resource allocation used to manage concurrent -// executions for a project namespace. -message ResourcePoolInfo { - // Unique resource ID used to identify this execution when allocating a token. - string allocation_token = 1; - - // Namespace under which this task execution requested an allocation token. - string namespace = 2; -} - -// Holds metadata around how a task was executed. -// As a task transitions across event phases during execution some attributes, such its generated name, generated external resources, -// and more may grow in size but not change necessarily based on the phase transition that sparked the event update. -// Metadata is a container for these attributes across the task execution lifecycle. -message TaskExecutionMetadata { - - // Unique, generated name for this task execution used by the backend. - string generated_name = 1; - - // Additional data on external resources on other back-ends or platforms (e.g. Hive, Qubole, etc) launched by this task execution. - repeated ExternalResourceInfo external_resources = 2; - - // Includes additional data on concurrent resource management used during execution.. - // This is a repeated field because a plugin can request multiple resource allocations during execution. - repeated ResourcePoolInfo resource_pool_info = 3; - - // The identifier of the plugin used to execute this task. - string plugin_identifier = 4; - - // Includes the broad category of machine used for this specific task execution. 
- enum InstanceClass { - // The default instance class configured for the flyte application platform. - DEFAULT = 0; - - // The instance class configured for interruptible tasks. - INTERRUPTIBLE = 1; - } - InstanceClass instance_class = 16; -} diff --git a/docs/api/flyteidl/flyteidl/plugins/array_job.proto b/docs/api/flyteidl/flyteidl/plugins/array_job.proto deleted file mode 100644 index e202316ef5..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/array_job.proto +++ /dev/null @@ -1,30 +0,0 @@ -syntax = "proto3"; - -package flyteidl.plugins; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; - -// Describes a job that can process independent pieces of data concurrently. Multiple copies of the runnable component -// will be executed concurrently. -message ArrayJob { - // Defines the maximum number of instances to bring up concurrently at any given point. Note that this is an - // optimistic restriction and that, due to network partitioning or other failures, the actual number of currently - // running instances might be more. This has to be a positive number if assigned. Default value is size. - int64 parallelism = 1; - - // Defines the number of instances to launch at most. This number should match the size of the input if the job - // requires processing of all input data. This has to be a positive number. - // In the case this is not defined, the back-end will determine the size at run-time by reading the inputs. - int64 size = 2; - - oneof success_criteria { - // An absolute number of the minimum number of successful completions of subtasks. As soon as this criteria is met, - // the array job will be marked as successful and outputs will be computed. This has to be a non-negative number if - // assigned. Default value is size (if specified). 
- int64 min_successes = 3; - - // If the array job size is not known beforehand, the min_success_ratio can instead be used to determine when an array - // job can be marked successful. - float min_success_ratio = 4; - } -} diff --git a/docs/api/flyteidl/flyteidl/plugins/common.proto b/docs/api/flyteidl/flyteidl/plugins/common.proto deleted file mode 100644 index 15f31cf2d2..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/common.proto +++ /dev/null @@ -1,27 +0,0 @@ -syntax = "proto3"; - -package flyteidl.plugins; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; - -import "flyteidl/core/tasks.proto"; - -enum RestartPolicy { - RESTART_POLICY_NEVER = 0; - RESTART_POLICY_ON_FAILURE = 1; - RESTART_POLICY_ALWAYS = 2; -} - -message CommonReplicaSpec { - // Number of replicas - int32 replicas = 1; - - // Image used for the replica group - string image = 2; - - // Resources required for the replica group - core.Resources resources = 3; - - // RestartPolicy determines whether pods will be restarted when they exit - RestartPolicy restart_policy = 4; -} diff --git a/docs/api/flyteidl/flyteidl/plugins/dask.proto b/docs/api/flyteidl/flyteidl/plugins/dask.proto deleted file mode 100644 index 96e861049a..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/dask.proto +++ /dev/null @@ -1,41 +0,0 @@ -syntax = "proto3"; - -import "flyteidl/core/tasks.proto"; - -package flyteidl.plugins; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; - - -// Custom Proto for Dask Plugin. -message DaskJob { - // Spec for the scheduler pod. - DaskScheduler scheduler = 1; - - // Spec of the default worker group. - DaskWorkerGroup workers = 2; -} - -// Specification for the scheduler pod. -message DaskScheduler { - // Optional image to use. If unset, will use the default image. - string image = 1; - - // Resources assigned to the scheduler pod. 
- core.Resources resources = 2; -} - -message DaskWorkerGroup { - // Number of workers in the group. - uint32 number_of_workers = 1; - - // Optional image to use for the pods of the worker group. If unset, will use the default image. - string image = 2; - - // Resources assigned to the all pods of the worker group. - // As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices - // it is advised to only set limits. If requests are not explicitly set, the plugin will make - // sure to set requests==limits. - // The plugin sets ` --memory-limit` as well as `--nthreads` for the workers according to the limit. - core.Resources resources = 3; -} diff --git a/docs/api/flyteidl/flyteidl/plugins/kubeflow/common.proto b/docs/api/flyteidl/flyteidl/plugins/kubeflow/common.proto deleted file mode 100644 index 37655caf3d..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/kubeflow/common.proto +++ /dev/null @@ -1,28 +0,0 @@ -syntax = "proto3"; - -package flyteidl.plugins.kubeflow; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins/kubeflow"; - -import public "flyteidl/plugins/common.proto"; - -enum CleanPodPolicy { - CLEANPOD_POLICY_NONE = 0; - CLEANPOD_POLICY_RUNNING = 1; - CLEANPOD_POLICY_ALL = 2; -} - -message RunPolicy { - // Defines the policy to kill pods after the job completes. Default to None. - CleanPodPolicy clean_pod_policy = 1; - - // TTL to clean up jobs. Default to infinite. - int32 ttl_seconds_after_finished = 2; - - // Specifies the duration in seconds relative to the startTime that the job may be active - // before the system tries to terminate it; value must be positive integer. - int32 active_deadline_seconds = 3; - - // Number of retries before marking this job failed. 
- int32 backoff_limit = 4; -} diff --git a/docs/api/flyteidl/flyteidl/plugins/kubeflow/mpi.proto b/docs/api/flyteidl/flyteidl/plugins/kubeflow/mpi.proto deleted file mode 100644 index b98e8aad99..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/kubeflow/mpi.proto +++ /dev/null @@ -1,47 +0,0 @@ -syntax = "proto3"; - -package flyteidl.plugins.kubeflow; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins/kubeflow"; - -import "flyteidl/core/tasks.proto"; -import "flyteidl/plugins/kubeflow/common.proto"; - -// Proto for plugin that enables distributed training using https://github.com/kubeflow/mpi-operator -message DistributedMPITrainingTask { - // Worker replicas spec - DistributedMPITrainingReplicaSpec worker_replicas = 1; - - // Master replicas spec - DistributedMPITrainingReplicaSpec launcher_replicas = 2; - - // RunPolicy encapsulates various runtime policies of the distributed training - // job, for example how to clean up resources and how long the job can stay - // active. - RunPolicy run_policy = 3; - - // Number of slots per worker - int32 slots = 4; -} - -// Replica specification for distributed MPI training -message DistributedMPITrainingReplicaSpec { - // 1~4 deprecated. Use common instead. 
- // Number of replicas - int32 replicas = 1 [deprecated = true]; - - // Image used for the replica group - string image = 2 [deprecated = true]; - - // Resources required for the replica group - core.Resources resources = 3 [deprecated = true]; - - // Restart policy determines whether pods will be restarted when they exit - RestartPolicy restart_policy = 4 [deprecated = true]; - - // MPI sometimes requires different command set for different replica groups - repeated string command = 5; - - // The common replica spec - CommonReplicaSpec common = 6; -} diff --git a/docs/api/flyteidl/flyteidl/plugins/kubeflow/pytorch.proto b/docs/api/flyteidl/flyteidl/plugins/kubeflow/pytorch.proto deleted file mode 100644 index 0433384e75..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/kubeflow/pytorch.proto +++ /dev/null @@ -1,53 +0,0 @@ -syntax = "proto3"; - -package flyteidl.plugins.kubeflow; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins/kubeflow"; - -import "flyteidl/core/tasks.proto"; -import "flyteidl/plugins/kubeflow/common.proto"; - -// Custom proto for torch elastic config for distributed training using -// https://github.com/kubeflow/training-operator/blob/master/pkg/apis/kubeflow.org/v1/pytorch_types.go -message ElasticConfig { - string rdzv_backend = 1; - int32 min_replicas = 2; - int32 max_replicas = 3; - int32 nproc_per_node = 4; - int32 max_restarts = 5; -} - -// Proto for plugin that enables distributed training using https://github.com/kubeflow/pytorch-operator -message DistributedPyTorchTrainingTask { - // Worker replicas spec - DistributedPyTorchTrainingReplicaSpec worker_replicas = 1; - - // Master replicas spec, master replicas can only have 1 replica - DistributedPyTorchTrainingReplicaSpec master_replicas = 2; - - // RunPolicy encapsulates various runtime policies of the distributed training - // job, for example how to clean up resources and how long the job can stay - // active. 
- RunPolicy run_policy = 3; - - // config for an elastic pytorch job - ElasticConfig elastic_config = 4; -} - -message DistributedPyTorchTrainingReplicaSpec { - // 1~4 deprecated. Use common instead. - // Number of replicas - int32 replicas = 1 [deprecated = true]; - - // Image used for the replica group - string image = 2 [deprecated = true]; - - // Resources required for the replica group - core.Resources resources = 3 [deprecated = true]; - - // Restart policy determines whether pods will be restarted when they exit - RestartPolicy restart_policy = 4 [deprecated = true]; - - // The common replica spec - CommonReplicaSpec common = 5; -} diff --git a/docs/api/flyteidl/flyteidl/plugins/kubeflow/tensorflow.proto b/docs/api/flyteidl/flyteidl/plugins/kubeflow/tensorflow.proto deleted file mode 100644 index 251526f7e0..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/kubeflow/tensorflow.proto +++ /dev/null @@ -1,46 +0,0 @@ -syntax = "proto3"; - -package flyteidl.plugins.kubeflow; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins/kubeflow"; - -import "flyteidl/core/tasks.proto"; -import "flyteidl/plugins/kubeflow/common.proto"; - -// Proto for plugin that enables distributed training using https://github.com/kubeflow/tf-operator -message DistributedTensorflowTrainingTask { - // Worker replicas spec - DistributedTensorflowTrainingReplicaSpec worker_replicas = 1; - - // Parameter server replicas spec - DistributedTensorflowTrainingReplicaSpec ps_replicas = 2; - - // Chief replicas spec - DistributedTensorflowTrainingReplicaSpec chief_replicas = 3; - - // RunPolicy encapsulates various runtime policies of the distributed training - // job, for example how to clean up resources and how long the job can stay - // active. - RunPolicy run_policy = 4; - - // Evaluator replicas spec - DistributedTensorflowTrainingReplicaSpec evaluator_replicas = 5; -} - -message DistributedTensorflowTrainingReplicaSpec { - // 1~4 deprecated. 
Use common instead. - // Number of replicas - int32 replicas = 1 [deprecated = true]; - - // Image used for the replica group - string image = 2 [deprecated = true]; - - // Resources required for the replica group - core.Resources resources = 3 [deprecated = true]; - - // Restart policy determines whether pods will be restarted when they exit - RestartPolicy restart_policy = 4 [deprecated = true]; - - // The common replica spec - CommonReplicaSpec common = 5; -} diff --git a/docs/api/flyteidl/flyteidl/plugins/mpi.proto b/docs/api/flyteidl/flyteidl/plugins/mpi.proto deleted file mode 100644 index 9e657279d8..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/mpi.proto +++ /dev/null @@ -1,20 +0,0 @@ -syntax = "proto3"; - -package flyteidl.plugins; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; - -// MPI operator proposal https://github.com/kubeflow/community/blob/master/proposals/mpi-operator-proposal.md -// Custom proto for plugin that enables distributed training using https://github.com/kubeflow/mpi-operator -message DistributedMPITrainingTask { - // number of worker spawned in the cluster for this job - int32 num_workers = 1; - - // number of launcher replicas spawned in the cluster for this job - // The launcher pod invokes mpirun and communicates with worker pods through MPI. - int32 num_launcher_replicas = 2; - - // number of slots per worker used in hostfile. - // The available slots (GPUs) in each pod. 
- int32 slots = 3; -} \ No newline at end of file diff --git a/docs/api/flyteidl/flyteidl/plugins/presto.proto b/docs/api/flyteidl/flyteidl/plugins/presto.proto deleted file mode 100644 index 5ff3a8a2e0..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/presto.proto +++ /dev/null @@ -1,14 +0,0 @@ -syntax = "proto3"; - -package flyteidl.plugins; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; - -// This message works with the 'presto' task type in the SDK and is the object that will be in the 'custom' field -// of a Presto task's TaskTemplate -message PrestoQuery { - string routing_group = 1; - string catalog = 2; - string schema = 3; - string statement = 4; -} diff --git a/docs/api/flyteidl/flyteidl/plugins/pytorch.proto b/docs/api/flyteidl/flyteidl/plugins/pytorch.proto deleted file mode 100644 index 02e748ab8e..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/pytorch.proto +++ /dev/null @@ -1,25 +0,0 @@ -syntax = "proto3"; - -package flyteidl.plugins; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; - -// Custom proto for torch elastic config for distributed training using -// https://github.com/kubeflow/training-operator/blob/master/pkg/apis/kubeflow.org/v1/pytorch_types.go -message ElasticConfig { - string rdzv_backend = 1; - int32 min_replicas = 2; - int32 max_replicas = 3; - int32 nproc_per_node = 4; - int32 max_restarts = 5; -} - -// Custom proto for plugin that enables distributed training using https://github.com/kubeflow/pytorch-operator -message DistributedPyTorchTrainingTask { - // number of worker replicas spawned in the cluster for this job - int32 workers = 1; - - // config for an elastic pytorch job - // - ElasticConfig elastic_config = 2; -} diff --git a/docs/api/flyteidl/flyteidl/plugins/qubole.proto b/docs/api/flyteidl/flyteidl/plugins/qubole.proto deleted file mode 100644 index b1faada9f3..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/qubole.proto +++ 
/dev/null @@ -1,26 +0,0 @@ -syntax = "proto3"; - -package flyteidl.plugins; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; - -// Defines a query to execute on a hive cluster. -message HiveQuery { - string query = 1; - uint32 timeout_sec = 2; - uint32 retryCount = 3; -} - -// Defines a collection of hive queries. -message HiveQueryCollection { - repeated HiveQuery queries = 2; -} - -// This message works with the 'hive' task type in the SDK and is the object that will be in the 'custom' field -// of a hive task's TaskTemplate -message QuboleHiveJob { - string cluster_label = 1; - HiveQueryCollection query_collection = 2 [deprecated=true]; - repeated string tags = 3; - HiveQuery query = 4; -} diff --git a/docs/api/flyteidl/flyteidl/plugins/ray.proto b/docs/api/flyteidl/flyteidl/plugins/ray.proto deleted file mode 100644 index c20c6360e7..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/ray.proto +++ /dev/null @@ -1,53 +0,0 @@ -syntax = "proto3"; - -package flyteidl.plugins; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; - -// RayJobSpec defines the desired state of RayJob -message RayJob { - // RayClusterSpec is the cluster template to run the job - RayCluster ray_cluster = 1; - // runtime_env is base64 encoded. - // Ray runtime environments: https://docs.ray.io/en/latest/ray-core/handling-dependencies.html#runtime-environments - string runtime_env = 2 [deprecated = true]; - // shutdown_after_job_finishes specifies whether the RayCluster should be deleted after the RayJob finishes. - bool shutdown_after_job_finishes = 3; - // ttl_seconds_after_finished specifies the number of seconds after which the RayCluster will be deleted after the RayJob finishes. - int32 ttl_seconds_after_finished = 4; - // RuntimeEnvYAML represents the runtime environment configuration - // provided as a multi-line YAML string. 
- string runtime_env_yaml = 5; -} - -// Define Ray cluster defines the desired state of RayCluster -message RayCluster { - // HeadGroupSpecs are the spec for the head pod - HeadGroupSpec head_group_spec = 1; - // WorkerGroupSpecs are the specs for the worker pods - repeated WorkerGroupSpec worker_group_spec = 2; - // Whether to enable autoscaling. - bool enable_autoscaling = 3; -} - -// HeadGroupSpec are the spec for the head pod -message HeadGroupSpec { - // Optional. RayStartParams are the params of the start command: address, object-store-memory. - // Refer to https://docs.ray.io/en/latest/ray-core/package-ref.html#ray-start - map ray_start_params = 1; -} - -// WorkerGroupSpec are the specs for the worker pods -message WorkerGroupSpec { - // Required. RayCluster can have multiple worker groups, and it distinguishes them by name - string group_name = 1; - // Required. Desired replicas of the worker group. Defaults to 1. - int32 replicas = 2; - // Optional. Min replicas of the worker group. MinReplicas defaults to 1. - int32 min_replicas = 3; - // Optional. Max replicas of the worker group. MaxReplicas defaults to maxInt32 - int32 max_replicas = 4; - // Optional. RayStartParams are the params of the start command: address, object-store-memory. - // Refer to https://docs.ray.io/en/latest/ray-core/package-ref.html#ray-start - map ray_start_params = 5; -} diff --git a/docs/api/flyteidl/flyteidl/plugins/spark.proto b/docs/api/flyteidl/flyteidl/plugins/spark.proto deleted file mode 100644 index 666ea311b2..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/spark.proto +++ /dev/null @@ -1,34 +0,0 @@ -syntax = "proto3"; - -package flyteidl.plugins; -import "google/protobuf/struct.proto"; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; - -message SparkApplication { - enum Type { - PYTHON = 0; - JAVA = 1; - SCALA = 2; - R = 3; - } -} - -// Custom Proto for Spark Plugin. 
-message SparkJob { - SparkApplication.Type applicationType = 1; - string mainApplicationFile = 2; - string mainClass = 3; - map sparkConf = 4; - map hadoopConf = 5; - string executorPath = 6; // Executor path for Python jobs. - // Databricks job configuration. - // Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure. - google.protobuf.Struct databricksConf = 7; - // Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html - // This token can be set in either flytepropeller or flytekit. - string databricksToken = 8; - // Domain name of your deployment. Use the form .cloud.databricks.com. - // This instance name can be set in either flytepropeller or flytekit. - string databricksInstance = 9; -} diff --git a/docs/api/flyteidl/flyteidl/plugins/tensorflow.proto b/docs/api/flyteidl/flyteidl/plugins/tensorflow.proto deleted file mode 100644 index e494a6cc32..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/tensorflow.proto +++ /dev/null @@ -1,18 +0,0 @@ -syntax = "proto3"; - -package flyteidl.plugins; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; - -// Custom proto for plugin that enables distributed training using https://github.com/kubeflow/tf-operator -message DistributedTensorflowTrainingTask { - // number of worker replicas spawned in the cluster for this job - int32 workers = 1; - // PS -> Parameter server - // number of ps replicas spawned in the cluster for this job - int32 ps_replicas = 2; - // number of chief replicas spawned in the cluster for this job - int32 chief_replicas = 3; - // number of evaluator replicas spawned in the cluster for this job - int32 evaluator_replicas = 4; -} diff --git a/docs/api/flyteidl/flyteidl/plugins/waitable.proto b/docs/api/flyteidl/flyteidl/plugins/waitable.proto deleted file mode 100644 index dd2138d535..0000000000 --- a/docs/api/flyteidl/flyteidl/plugins/waitable.proto +++ /dev/null @@ 
-1,15 +0,0 @@ -syntax = "proto3"; - -import "flyteidl/core/execution.proto"; -import "flyteidl/core/identifier.proto"; - -package flyteidl.plugins; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; - -// Represents an Execution that was launched and could be waited on. -message Waitable { - core.WorkflowExecutionIdentifier wf_exec_id = 1; - core.WorkflowExecution.Phase phase = 2; - string workflow_id = 3; -} diff --git a/docs/api/flyteidl/flyteidl/service/admin.proto b/docs/api/flyteidl/flyteidl/service/admin.proto deleted file mode 100644 index d7d5adeeef..0000000000 --- a/docs/api/flyteidl/flyteidl/service/admin.proto +++ /dev/null @@ -1,668 +0,0 @@ -syntax = "proto3"; -package flyteidl.service; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; - -import "google/api/annotations.proto"; -import "flyteidl/admin/project.proto"; -import "flyteidl/admin/project_domain_attributes.proto"; -import "flyteidl/admin/project_attributes.proto"; -import "flyteidl/admin/task.proto"; -import "flyteidl/admin/workflow.proto"; -import "flyteidl/admin/workflow_attributes.proto"; -import "flyteidl/admin/launch_plan.proto"; -import "flyteidl/admin/event.proto"; -import "flyteidl/admin/execution.proto"; -import "flyteidl/admin/matchable_resource.proto"; -import "flyteidl/admin/node_execution.proto"; -import "flyteidl/admin/task_execution.proto"; -import "flyteidl/admin/version.proto"; -import "flyteidl/admin/common.proto"; -import "flyteidl/admin/description_entity.proto"; -import "protoc-gen-openapiv2/options/annotations.proto"; - - -// The following defines an RPC service that is also served over HTTP via grpc-gateway. 
-// Standard response codes for both are defined here: https://github.com/grpc-ecosystem/grpc-gateway/blob/master/runtime/errors.go -service AdminService { - // Create and upload a :ref:`ref_flyteidl.admin.Task` definition - rpc CreateTask (flyteidl.admin.TaskCreateRequest) returns (flyteidl.admin.TaskCreateResponse) { - option (google.api.http) = { - post: "/api/v1/tasks" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Create and register a task definition." - responses: { - key: "400" - value: { - description: "Returned for bad request that may have failed validation." - } - } - responses: { - key: "409" - value: { - description: "Returned for a request that references an identical entity that has already been registered." - } - } - }; - } - - // Fetch a :ref:`ref_flyteidl.admin.Task` definition. - rpc GetTask (flyteidl.admin.ObjectGetRequest) returns (flyteidl.admin.Task) { - option (google.api.http) = { - get: "/api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve an existing task definition." - }; - } - - // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. - rpc ListTaskIds (flyteidl.admin.NamedEntityIdentifierListRequest) returns (flyteidl.admin.NamedEntityIdentifierList) { - option (google.api.http) = { - get: "/api/v1/task_ids/{project}/{domain}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Fetch existing task definition identifiers matching input filters." - }; - } - - // Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. 
- rpc ListTasks (flyteidl.admin.ResourceListRequest) returns (flyteidl.admin.TaskList) { - option (google.api.http) = { - get: "/api/v1/tasks/{id.project}/{id.domain}/{id.name}" - additional_bindings { - get: "/api/v1/tasks/{id.project}/{id.domain}", - } - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Fetch existing task definitions matching input filters." - }; - } - - // Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition - rpc CreateWorkflow (flyteidl.admin.WorkflowCreateRequest) returns (flyteidl.admin.WorkflowCreateResponse) { - option (google.api.http) = { - post: "/api/v1/workflows" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Create and register a workflow definition." - responses: { - key: "400" - value: { - description: "Returned for bad request that may have failed validation." - } - } - responses: { - key: "409" - value: { - description: "Returned for a request that references an identical entity that has already been registered." - } - } - }; - } - - // Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. - rpc GetWorkflow (flyteidl.admin.ObjectGetRequest) returns (flyteidl.admin.Workflow) { - option (google.api.http) = { - get: "/api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve an existing workflow definition." - }; - } - - // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. - rpc ListWorkflowIds (flyteidl.admin.NamedEntityIdentifierListRequest) returns (flyteidl.admin.NamedEntityIdentifierList) { - option (google.api.http) = { - get: "/api/v1/workflow_ids/{project}/{domain}" - }; - // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - // description: "Fetch an existing workflow definition identifiers matching input filters." 
- // }; - } - - // Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. - rpc ListWorkflows (flyteidl.admin.ResourceListRequest) returns (flyteidl.admin.WorkflowList) { - option (google.api.http) = { - get: "/api/v1/workflows/{id.project}/{id.domain}/{id.name}" - additional_bindings { - get: "/api/v1/workflows/{id.project}/{id.domain}", - } - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Fetch existing workflow definitions matching input filters." - }; - } - - // Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition - rpc CreateLaunchPlan (flyteidl.admin.LaunchPlanCreateRequest) returns (flyteidl.admin.LaunchPlanCreateResponse) { - option (google.api.http) = { - post: "/api/v1/launch_plans" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Create and register a launch plan definition." - responses: { - key: "400" - value: { - description: "Returned for bad request that may have failed validation." - } - } - responses: { - key: "409" - value: { - description: "Returned for a request that references an identical entity that has already been registered." - } - } - }; - } - - // Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. - rpc GetLaunchPlan (flyteidl.admin.ObjectGetRequest) returns (flyteidl.admin.LaunchPlan) { - option (google.api.http) = { - get: "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve an existing launch plan definition." - }; - } - - // Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. 
- rpc GetActiveLaunchPlan (flyteidl.admin.ActiveLaunchPlanRequest) returns (flyteidl.admin.LaunchPlan) { - option (google.api.http) = { - get: "/api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve the active launch plan version specified by input request filters." - }; - } - - // List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. - rpc ListActiveLaunchPlans (flyteidl.admin.ActiveLaunchPlanListRequest) returns (flyteidl.admin.LaunchPlanList) { - option (google.api.http) = { - get: "/api/v1/active_launch_plans/{project}/{domain}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Fetch the active launch plan versions specified by input request filters." - }; - } - - // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. - rpc ListLaunchPlanIds (flyteidl.admin.NamedEntityIdentifierListRequest) returns (flyteidl.admin.NamedEntityIdentifierList) { - option (google.api.http) = { - get: "/api/v1/launch_plan_ids/{project}/{domain}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Fetch existing launch plan definition identifiers matching input filters." - }; - } - - // Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. - rpc ListLaunchPlans (flyteidl.admin.ResourceListRequest) returns (flyteidl.admin.LaunchPlanList) { - option (google.api.http) = { - get: "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}" - additional_bindings { - get: "/api/v1/launch_plans/{id.project}/{id.domain}" - } - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Fetch existing launch plan definitions matching input filters." - }; - } - - // Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. 
- rpc UpdateLaunchPlan (flyteidl.admin.LaunchPlanUpdateRequest) returns (flyteidl.admin.LaunchPlanUpdateResponse) { - option (google.api.http) = { - put: "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Update the status of an existing launch plan definition. " - "At most one launch plan version for a given {project, domain, name} can be active at a time. " - "If this call sets a launch plan to active and existing version is already active, the result of this call will be that the " - "formerly active launch plan will be made inactive and specified launch plan in this request will be made active. " - "In the event that the formerly active launch plan had a schedule associated it with it, this schedule will be disabled. " - "If the reference launch plan in this request is being set to active and has a schedule associated with it, the schedule will be enabled." - }; - } - - // Triggers the creation of a :ref:`ref_flyteidl.admin.Execution` - rpc CreateExecution (flyteidl.admin.ExecutionCreateRequest) returns (flyteidl.admin.ExecutionCreateResponse) { - option (google.api.http) = { - post: "/api/v1/executions" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Create a workflow execution." - }; - } - - // Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` - rpc RelaunchExecution (flyteidl.admin.ExecutionRelaunchRequest) returns (flyteidl.admin.ExecutionCreateResponse) { - option (google.api.http) = { - post: "/api/v1/executions/relaunch" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Relaunch a workflow execution." - }; - } - - // Recreates a previously-run workflow execution that will only start executing from the last known failure point. 
- // In Recover mode, users cannot change any input parameters or update the version of the execution. - // This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, - // downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. - // See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. - rpc RecoverExecution (flyteidl.admin.ExecutionRecoverRequest) returns (flyteidl.admin.ExecutionCreateResponse) { - option (google.api.http) = { - post: "/api/v1/executions/recover" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Recreates a previously-run workflow execution that will only start executing from the last known failure point. " - "In Recover mode, users cannot change any input parameters or update the version of the execution. " - "This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, " - "downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again." - }; - } - - // Fetches a :ref:`ref_flyteidl.admin.Execution`. - rpc GetExecution (flyteidl.admin.WorkflowExecutionGetRequest) returns (flyteidl.admin.Execution) { - option (google.api.http) = { - get: "/api/v1/executions/{id.project}/{id.domain}/{id.name}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve an existing workflow execution." - }; - } - - // Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. 
- rpc UpdateExecution (flyteidl.admin.ExecutionUpdateRequest) returns (flyteidl.admin.ExecutionUpdateResponse) { - option (google.api.http) = { - put: "/api/v1/executions/{id.project}/{id.domain}/{id.name}" - body: "*" - }; - // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - // description: "Update execution belonging to project domain." - // }; - } - - // Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. - rpc GetExecutionData (flyteidl.admin.WorkflowExecutionGetDataRequest) returns (flyteidl.admin.WorkflowExecutionGetDataResponse) { - option (google.api.http) = { - get: "/api/v1/data/executions/{id.project}/{id.domain}/{id.name}" - }; - // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - // description: "Retrieve input and output data from an existing workflow execution." - // }; - }; - - // Fetch a list of :ref:`ref_flyteidl.admin.Execution`. - rpc ListExecutions (flyteidl.admin.ResourceListRequest) returns (flyteidl.admin.ExecutionList) { - option (google.api.http) = { - get: "/api/v1/executions/{id.project}/{id.domain}" - }; - // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - // description: "Fetch existing workflow executions matching input filters." - // }; - } - - // Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. - rpc TerminateExecution (flyteidl.admin.ExecutionTerminateRequest) returns (flyteidl.admin.ExecutionTerminateResponse) { - option (google.api.http) = { - delete: "/api/v1/executions/{id.project}/{id.domain}/{id.name}" - body: "*" - }; - // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - // description: "Terminate the active workflow execution specified in the request." - // }; - } - - // Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. 
- rpc GetNodeExecution (flyteidl.admin.NodeExecutionGetRequest) returns (flyteidl.admin.NodeExecution) { - option (google.api.http) = { - get: "/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" - }; - // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - // description: "Retrieve an existing node execution." - // }; - } - - // Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. - rpc GetDynamicNodeWorkflow (flyteidl.admin.GetDynamicNodeWorkflowRequest) returns (flyteidl.admin.DynamicNodeWorkflowResponse) { - option (google.api.http) = { - get: "/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow" - }; - // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - // description: "Retrieve a workflow closure from a dynamic node execution." - // }; - } - - // Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. - rpc ListNodeExecutions (flyteidl.admin.NodeExecutionListRequest) returns (flyteidl.admin.NodeExecutionList) { - option (google.api.http) = { - get: "/api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}" - }; - // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - // description: "Fetch existing node executions matching input filters." - // }; - } - - // Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. 
- rpc ListNodeExecutionsForTask (flyteidl.admin.NodeExecutionForTaskListRequest) returns (flyteidl.admin.NodeExecutionList) { - option (google.api.http) = { - get: "/api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}" - }; - // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - // description: "Fetch child node executions launched by the specified task execution." - // }; - } - - // Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. - rpc GetNodeExecutionData (flyteidl.admin.NodeExecutionGetDataRequest) returns (flyteidl.admin.NodeExecutionGetDataResponse) { - option (google.api.http) = { - get: "/api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" - }; - // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - // description: "Retrieve input and output data from an existing node execution." - // }; - }; - - // Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. - rpc RegisterProject (flyteidl.admin.ProjectRegisterRequest) returns (flyteidl.admin.ProjectRegisterResponse) { - option (google.api.http) = { - post: "/api/v1/projects" - body: "*" - }; - // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - // description: "Register a project." - // }; - } - - // Updates an existing :ref:`ref_flyteidl.admin.Project` - // flyteidl.admin.Project should be passed but the domains property should be empty; - // it will be ignored in the handler as domains cannot be updated via this API. 
- rpc UpdateProject (flyteidl.admin.Project) returns (flyteidl.admin.ProjectUpdateResponse) { - option (google.api.http) = { - put: "/api/v1/projects/{id}" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Update a project." - }; - } - - // Fetches a :ref:`ref_flyteidl.admin.Project` - rpc GetProject (flyteidl.admin.ProjectGetRequest) returns (flyteidl.admin.Project) { - option (google.api.http) = { - get: "/api/v1/projects/{id}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Fetch a registered project." - }; - } - - // Fetches a list of :ref:`ref_flyteidl.admin.Project` - rpc ListProjects (flyteidl.admin.ProjectListRequest) returns (flyteidl.admin.Projects) { - option (google.api.http) = { - get: "/api/v1/projects" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Fetch registered projects." - }; - } - - rpc GetDomains (flyteidl.admin.GetDomainRequest) returns (flyteidl.admin.GetDomainsResponse) { - option (google.api.http) = { - get: "/api/v1/domains" - }; - // option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - // description: "Fetch registered domains." - // }; - } - - // Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. - rpc CreateWorkflowEvent (flyteidl.admin.WorkflowExecutionEventRequest) returns (flyteidl.admin.WorkflowExecutionEventResponse) { - option (google.api.http) = { - post: "/api/v1/events/workflows" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Create a workflow execution event recording a phase transition." - }; - } - - // Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. 
- rpc CreateNodeEvent (flyteidl.admin.NodeExecutionEventRequest) returns (flyteidl.admin.NodeExecutionEventResponse) { - option (google.api.http) = { - post: "/api/v1/events/nodes" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Create a node execution event recording a phase transition." - }; - } - - // Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. - rpc CreateTaskEvent (flyteidl.admin.TaskExecutionEventRequest) returns (flyteidl.admin.TaskExecutionEventResponse) { - option (google.api.http) = { - post: "/api/v1/events/tasks" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Create a task execution event recording a phase transition." - }; - } - - // Fetches a :ref:`ref_flyteidl.admin.TaskExecution`. - rpc GetTaskExecution (flyteidl.admin.TaskExecutionGetRequest) returns (flyteidl.admin.TaskExecution) { - option (google.api.http) = { - get: "/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve an existing task execution." - }; - } - - // Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. - rpc ListTaskExecutions (flyteidl.admin.TaskExecutionListRequest) returns (flyteidl.admin.TaskExecutionList) { - option (google.api.http) = { - get: "/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Fetch existing task executions matching input filters." 
- }; - - } - - // Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. - rpc GetTaskExecutionData (flyteidl.admin.TaskExecutionGetDataRequest) returns (flyteidl.admin.TaskExecutionGetDataResponse) { - option (google.api.http) = { - get: "/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve input and output data from an existing task execution." - }; - } - - // Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. - rpc UpdateProjectDomainAttributes (flyteidl.admin.ProjectDomainAttributesUpdateRequest) returns (flyteidl.admin.ProjectDomainAttributesUpdateResponse) { - option (google.api.http) = { - put: "/api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Update the customized resource attributes associated with a project-domain combination" - }; - } - - // Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. - rpc GetProjectDomainAttributes (flyteidl.admin.ProjectDomainAttributesGetRequest) returns (flyteidl.admin.ProjectDomainAttributesGetResponse) { - option (google.api.http) = { - get: "/api/v1/project_domain_attributes/{project}/{domain}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve the customized resource attributes associated with a project-domain combination" - }; - } - - // Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. 
- rpc DeleteProjectDomainAttributes (flyteidl.admin.ProjectDomainAttributesDeleteRequest) returns (flyteidl.admin.ProjectDomainAttributesDeleteResponse) { - option (google.api.http) = { - delete: "/api/v1/project_domain_attributes/{project}/{domain}" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Delete the customized resource attributes associated with a project-domain combination" - }; - } - - // Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level - rpc UpdateProjectAttributes (flyteidl.admin.ProjectAttributesUpdateRequest) returns (flyteidl.admin.ProjectAttributesUpdateResponse) { - option (google.api.http) = { - put: "/api/v1/project_attributes/{attributes.project}" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Update the customized resource attributes associated with a project" - }; - } - - // Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. - rpc GetProjectAttributes (flyteidl.admin.ProjectAttributesGetRequest) returns (flyteidl.admin.ProjectAttributesGetResponse) { - option (google.api.http) = { - get: "/api/v1/project_attributes/{project}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve the customized resource attributes associated with a project" - }; - } - - // Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. 
- rpc DeleteProjectAttributes (flyteidl.admin.ProjectAttributesDeleteRequest) returns (flyteidl.admin.ProjectAttributesDeleteResponse) { - option (google.api.http) = { - delete: "/api/v1/project_attributes/{project}" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Delete the customized resource attributes associated with a project" - }; - } - // Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. - rpc UpdateWorkflowAttributes (flyteidl.admin.WorkflowAttributesUpdateRequest) returns (flyteidl.admin.WorkflowAttributesUpdateResponse) { - option (google.api.http) = { - put: "/api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Update the customized resource attributes associated with a project, domain and workflow combination" - }; - } - - // Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. - rpc GetWorkflowAttributes (flyteidl.admin.WorkflowAttributesGetRequest) returns (flyteidl.admin.WorkflowAttributesGetResponse) { - option (google.api.http) = { - get: "/api/v1/workflow_attributes/{project}/{domain}/{workflow}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve the customized resource attributes associated with a project, domain and workflow combination" - }; - } - - // Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. 
- rpc DeleteWorkflowAttributes (flyteidl.admin.WorkflowAttributesDeleteRequest) returns (flyteidl.admin.WorkflowAttributesDeleteResponse) { - option (google.api.http) = { - delete: "/api/v1/workflow_attributes/{project}/{domain}/{workflow}" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Delete the customized resource attributes associated with a project, domain and workflow combination" - }; - } - - // Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. - rpc ListMatchableAttributes (flyteidl.admin.ListMatchableAttributesRequest) returns (flyteidl.admin.ListMatchableAttributesResponse) { - option (google.api.http) = { - get: "/api/v1/matchable_attributes" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve a list of MatchableAttributesConfiguration objects." - }; - } - - // Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. - rpc ListNamedEntities (flyteidl.admin.NamedEntityListRequest) returns (flyteidl.admin.NamedEntityList) { - option (google.api.http) = { - get: "/api/v1/named_entities/{resource_type}/{project}/{domain}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve a list of NamedEntity objects sharing a common resource type, project, and domain." - }; - } - - // Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. - rpc GetNamedEntity (flyteidl.admin.NamedEntityGetRequest) returns (flyteidl.admin.NamedEntity) { - option (google.api.http) = { - get: "/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve a NamedEntity object." - }; - } - - // Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. 
- rpc UpdateNamedEntity (flyteidl.admin.NamedEntityUpdateRequest) returns (flyteidl.admin.NamedEntityUpdateResponse) { - option (google.api.http) = { - put: "/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Update the fields associated with a NamedEntity" - }; - } - - rpc GetVersion (flyteidl.admin.GetVersionRequest) returns (flyteidl.admin.GetVersionResponse) { - option (google.api.http) = { - get: "/api/v1/version" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve the Version (including the Build information) for FlyteAdmin service" - }; - } - - // Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. - rpc GetDescriptionEntity (flyteidl.admin.ObjectGetRequest) returns (flyteidl.admin.DescriptionEntity) { - option (google.api.http) = { - get: "/api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve an existing description entity description." - }; - } - - // Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. - rpc ListDescriptionEntities (flyteidl.admin.DescriptionEntityListRequest) returns (flyteidl.admin.DescriptionEntityList) { - option (google.api.http) = { - get: "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" - additional_bindings { - get: "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}" - } - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Fetch existing description entity definitions matching input filters." - }; - } - - // Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. 
- rpc GetExecutionMetrics (flyteidl.admin.WorkflowExecutionGetMetricsRequest) returns (flyteidl.admin.WorkflowExecutionGetMetricsResponse) { - option (google.api.http) = { - get: "/api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve metrics from an existing workflow execution." - }; - }; -} diff --git a/docs/api/flyteidl/flyteidl/service/agent.proto b/docs/api/flyteidl/flyteidl/service/agent.proto deleted file mode 100644 index cd6b93a972..0000000000 --- a/docs/api/flyteidl/flyteidl/service/agent.proto +++ /dev/null @@ -1,79 +0,0 @@ -syntax = "proto3"; -package flyteidl.service; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; - -import "google/api/annotations.proto"; -import "flyteidl/admin/agent.proto"; - -// SyncAgentService defines an RPC Service that allows propeller to send the request to the agent server synchronously. -service SyncAgentService { - // ExecuteTaskSync streams the create request and inputs to the agent service and streams the outputs back. - rpc ExecuteTaskSync (stream flyteidl.admin.ExecuteTaskSyncRequest) returns (stream flyteidl.admin.ExecuteTaskSyncResponse){ - option (google.api.http) = { - post: "/api/v1/agent/task/stream" - body: "*" - }; - }; -} - -// AsyncAgentService defines an RPC Service that allows propeller to send the request to the agent server asynchronously. -service AsyncAgentService { - // CreateTask sends a task create request to the agent service. - rpc CreateTask (flyteidl.admin.CreateTaskRequest) returns (flyteidl.admin.CreateTaskResponse){ - option (google.api.http) = { - post: "/api/v1/agent/task" - body: "*" - }; - }; - - // Get job status. 
- rpc GetTask (flyteidl.admin.GetTaskRequest) returns (flyteidl.admin.GetTaskResponse){ - option (google.api.http) = { - get: "/api/v1/agent/task/{task_category.name}/{task_category.version}/{resource_meta}" - }; - }; - - // Delete the task resource. - rpc DeleteTask (flyteidl.admin.DeleteTaskRequest) returns (flyteidl.admin.DeleteTaskResponse){ - option (google.api.http) = { - delete: "/api/v1/agent/task_executions/{task_category.name}/{task_category.version}/{resource_meta}" - }; - }; - - // GetTaskMetrics returns one or more task execution metrics, if available. - // - // Errors include - // * OutOfRange if metrics are not available for the specified task time range - // * various other errors - rpc GetTaskMetrics(flyteidl.admin.GetTaskMetricsRequest) returns (flyteidl.admin.GetTaskMetricsResponse){ - option (google.api.http) = { - get: "/api/v1/agent/task/metrics/{task_category.name}/{task_category.version}/{resource_meta}" - }; - }; - - // GetTaskLogs returns task execution logs, if available. - rpc GetTaskLogs(flyteidl.admin.GetTaskLogsRequest) returns (stream flyteidl.admin.GetTaskLogsResponse){ - option (google.api.http) = { - get: "/api/v1/agent/task/logs/{task_category.name}/{task_category.version}/{resource_meta}" - }; - }; -} - -// AgentMetadataService defines an RPC service that is also served over HTTP via grpc-gateway. -// This service allows propeller or users to get the metadata of agents. -service AgentMetadataService { - // Fetch a :ref:`ref_flyteidl.admin.Agent` definition. - rpc GetAgent (flyteidl.admin.GetAgentRequest) returns (flyteidl.admin.GetAgentResponse){ - option (google.api.http) = { - get: "/api/v1/agent/{name}" - }; - }; - - // Fetch a list of :ref:`ref_flyteidl.admin.Agent` definitions. 
- rpc ListAgents (flyteidl.admin.ListAgentsRequest) returns (flyteidl.admin.ListAgentsResponse){ - option (google.api.http) = { - get: "/api/v1/agents" - }; - }; -} diff --git a/docs/api/flyteidl/flyteidl/service/auth.proto b/docs/api/flyteidl/flyteidl/service/auth.proto deleted file mode 100644 index a340f05add..0000000000 --- a/docs/api/flyteidl/flyteidl/service/auth.proto +++ /dev/null @@ -1,94 +0,0 @@ -syntax = "proto3"; -package flyteidl.service; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; - -import "google/api/annotations.proto"; -import "protoc-gen-openapiv2/options/annotations.proto"; - -message OAuth2MetadataRequest {} - -// OAuth2MetadataResponse defines an RFC-Compliant response for /.well-known/oauth-authorization-server metadata -// as defined in https://tools.ietf.org/html/rfc8414 -message OAuth2MetadataResponse { - // Defines the issuer string in all JWT tokens this server issues. The issuer can be admin itself or an external - // issuer. - string issuer = 1; - - // URL of the authorization server's authorization endpoint [RFC6749]. This is REQUIRED unless no grant types are - // supported that use the authorization endpoint. - string authorization_endpoint = 2; - - // URL of the authorization server's token endpoint [RFC6749]. - string token_endpoint = 3; - - // Array containing a list of the OAuth 2.0 response_type values that this authorization server supports. - repeated string response_types_supported = 4; - - // JSON array containing a list of the OAuth 2.0 [RFC6749] scope values that this authorization server supports. - repeated string scopes_supported = 5; - - // JSON array containing a list of client authentication methods supported by this token endpoint. - repeated string token_endpoint_auth_methods_supported = 6; - - // URL of the authorization server's JWK Set [JWK] document. 
The referenced document contains the signing key(s) the - // client uses to validate signatures from the authorization server. - string jwks_uri = 7; - - // JSON array containing a list of Proof Key for Code Exchange (PKCE) [RFC7636] code challenge methods supported by - // this authorization server. - repeated string code_challenge_methods_supported = 8; - - // JSON array containing a list of the OAuth 2.0 grant type values that this authorization server supports. - repeated string grant_types_supported = 9; - - // URL of the authorization server's device authorization endpoint, as defined in Section 3.1 of [RFC8628] - string device_authorization_endpoint = 10; -} - -message PublicClientAuthConfigRequest {} - -// FlyteClientResponse encapsulates public information that flyte clients (CLIs... etc.) can use to authenticate users. -message PublicClientAuthConfigResponse { - // client_id to use when initiating OAuth2 authorization requests. - string client_id = 1; - // redirect uri to use when initiating OAuth2 authorization requests. - string redirect_uri = 2; - // scopes to request when initiating OAuth2 authorization requests. - repeated string scopes = 3; - // Authorization Header to use when passing Access Tokens to the server. If not provided, the client should use the - // default http `Authorization` header. - string authorization_metadata_key = 4; - // ServiceHttpEndpoint points to the http endpoint for the backend. If empty, clients can assume the endpoint used - // to configure the gRPC connection can be used for the http one respecting the insecure flag to choose between - // SSL or no SSL connections. - string service_http_endpoint = 5; - // audience to use when initiating OAuth2 authorization requests. - string audience = 6; -} - -// The following defines an RPC service that is also served over HTTP via grpc-gateway. 
-// Standard response codes for both are defined here: https://github.com/grpc-ecosystem/grpc-gateway/blob/master/runtime/errors.go -// RPCs defined in this service must be anonymously accessible. -service AuthMetadataService { - // Anonymously accessible. Retrieves local or external oauth authorization server metadata. - rpc GetOAuth2Metadata (OAuth2MetadataRequest) returns (OAuth2MetadataResponse) { - option (google.api.http) = { - get: "/.well-known/oauth-authorization-server" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieves OAuth2 authorization server metadata. This endpoint is anonymously accessible." - }; - } - - // Anonymously accessible. Retrieves the client information clients should use when initiating OAuth2 authorization - // requests. - rpc GetPublicClientConfig (PublicClientAuthConfigRequest) returns (PublicClientAuthConfigResponse) { - option (google.api.http) = { - get: "/config/v1/flyte_client" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieves public flyte client info. This endpoint is anonymously accessible." - }; - } -} diff --git a/docs/api/flyteidl/flyteidl/service/dataproxy.proto b/docs/api/flyteidl/flyteidl/service/dataproxy.proto deleted file mode 100644 index 86c7c4d977..0000000000 --- a/docs/api/flyteidl/flyteidl/service/dataproxy.proto +++ /dev/null @@ -1,205 +0,0 @@ -syntax = "proto3"; -package flyteidl.service; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; - -import "google/api/annotations.proto"; -import "protoc-gen-openapiv2/options/annotations.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/timestamp.proto"; -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/literals.proto"; - - -message CreateUploadLocationResponse { - // SignedUrl specifies the url to use to upload content to (e.g. 
https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...) - string signed_url = 1; - - // NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar) - string native_url = 2; - - // ExpiresAt defines when will the signed URL expires. - google.protobuf.Timestamp expires_at = 3; - - // Data proxy generates these headers for client, and they have to add these headers to the request when uploading the file. - map headers = 4; -} - -// CreateUploadLocationRequest specified request for the CreateUploadLocation API. -// The implementation in data proxy service will create the s3 location with some server side configured prefixes, -// and then: -// - project/domain/(a deterministic str representation of the content_md5)/filename (if present); OR -// - project/domain/filename_root (if present)/filename (if present). -message CreateUploadLocationRequest { - // Project to create the upload location for - // +required - string project = 1; - - // Domain to create the upload location for. - // +required - string domain = 2; - - // Filename specifies a desired suffix for the generated location. E.g. `file.py` or `pre/fix/file.zip`. - // +optional. By default, the service will generate a consistent name based on the provided parameters. - string filename = 3; - - // ExpiresIn defines a requested expiration duration for the generated url. The request will be rejected if this - // exceeds the platform allowed max. - // +optional. The default value comes from a global config. - google.protobuf.Duration expires_in = 4; - - // ContentMD5 restricts the upload location to the specific MD5 provided. The ContentMD5 will also appear in the - // generated path. - // +required - bytes content_md5 = 5; - - // If present, data proxy will use this string in lieu of the md5 hash in the path. When the filename is also included - // this makes the upload location deterministic. 
The native url will still be prefixed by the upload location prefix - // in data proxy config. This option is useful when uploading multiple files. - // +optional - string filename_root = 6; - - // If true, the data proxy will add content_md5 to the metadata to the signed URL and - // it will force clients to add this metadata to the object. - // This make sure dataproxy is backward compatible with the old flytekit. - bool add_content_md5_metadata = 7; - - - // Optional, org key applied to the resource. - string org = 8; -} - -// CreateDownloadLocationRequest specified request for the CreateDownloadLocation API. -message CreateDownloadLocationRequest { - option deprecated = true; - // NativeUrl specifies the url in the format of the configured storage provider (e.g. s3://my-bucket/randomstring/suffix.tar) - string native_url = 1; - - // ExpiresIn defines a requested expiration duration for the generated url. The request will be rejected if this - // exceeds the platform allowed max. - // +optional. The default value comes from a global config. - google.protobuf.Duration expires_in = 2; -} - -message CreateDownloadLocationResponse { - option deprecated = true; - // SignedUrl specifies the url to use to download content from (e.g. https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...) - string signed_url = 1; - // ExpiresAt defines when will the signed URL expires. - google.protobuf.Timestamp expires_at = 2; -} - -// ArtifactType -enum ArtifactType { - // ARTIFACT_TYPE_UNDEFINED is the default, often invalid, value for the enum. - ARTIFACT_TYPE_UNDEFINED = 0; - - // ARTIFACT_TYPE_DECK refers to the deck html file optionally generated after a task, a workflow or a launch plan - // finishes executing. - ARTIFACT_TYPE_DECK = 1; -} - -// CreateDownloadLinkRequest defines the request parameters to create a download link (signed url) -message CreateDownloadLinkRequest { - // ArtifactType of the artifact requested. 
- ArtifactType artifact_type = 1; - - // ExpiresIn defines a requested expiration duration for the generated url. The request will be rejected if this - // exceeds the platform allowed max. - // +optional. The default value comes from a global config. - google.protobuf.Duration expires_in = 2; - - oneof source { - // NodeId is the unique identifier for the node execution. For a task node, this will retrieve the output of the - // most recent attempt of the task. - core.NodeExecutionIdentifier node_execution_id = 3; - } -} - -// CreateDownloadLinkResponse defines the response for the generated links -message CreateDownloadLinkResponse { - // SignedUrl specifies the url to use to download content from (e.g. https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...) - repeated string signed_url = 1 [deprecated = true]; - - // ExpiresAt defines when will the signed URL expire. - google.protobuf.Timestamp expires_at = 2 [deprecated = true]; - - // New wrapper object containing the signed urls and expiration time - PreSignedURLs pre_signed_urls = 3; -} - -// Wrapper object since the message is shared across this and the GetDataResponse -message PreSignedURLs { - // SignedUrl specifies the url to use to download content from (e.g. https://my-bucket.s3.amazonaws.com/randomstring/suffix.tar?X-...) - repeated string signed_url = 1; - - // ExpiresAt defines when will the signed URL expire. - google.protobuf.Timestamp expires_at = 2; -} - -// General request artifact to retrieve data from a Flyte artifact url. -message GetDataRequest { - // A unique identifier in the form of flyte:// that uniquely, for a given Flyte - // backend, identifies a Flyte artifact ([i]nput, [o]output, flyte [d]eck, etc.). - // e.g. 
flyte://v1/proj/development/execid/n2/0/i (for 0th task execution attempt input) - // flyte://v1/proj/development/execid/n2/i (for node execution input) - // flyte://v1/proj/development/execid/n2/o/o3 (the o3 output of the second node) - string flyte_url = 1; -} - -message GetDataResponse { - oneof data { - // literal map data will be returned - core.LiteralMap literal_map = 1; - - // Flyte deck html will be returned as a signed url users can download - PreSignedURLs pre_signed_urls = 2; - - // Single literal will be returned. This is returned when the user/url requests a specific output or input - // by name. See the o3 example above. - core.Literal literal = 3; - } -} - -// DataProxyService defines an RPC Service that allows access to user-data in a controlled manner. -service DataProxyService { - // CreateUploadLocation creates a signed url to upload artifacts to for a given project/domain. - rpc CreateUploadLocation (CreateUploadLocationRequest) returns (CreateUploadLocationResponse) { - option (google.api.http) = { - post: "/api/v1/dataproxy/artifact_urn" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Creates a write-only http location that is accessible for tasks at runtime." - }; - } - - // CreateDownloadLocation creates a signed url to download artifacts. - rpc CreateDownloadLocation (CreateDownloadLocationRequest) returns (CreateDownloadLocationResponse) { - option deprecated = true; - option (google.api.http) = { - get: "/api/v1/dataproxy/artifact_urn" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Deprecated: Please use CreateDownloadLink instead. Creates a read-only http location that is accessible for tasks at runtime." - }; - } - - // CreateDownloadLocation creates a signed url to download artifacts. 
- rpc CreateDownloadLink (CreateDownloadLinkRequest) returns (CreateDownloadLinkResponse) { - option (google.api.http) = { - post: "/api/v1/dataproxy/artifact_link" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Creates a read-only http location that is accessible for tasks at runtime." - }; - } - - rpc GetData (GetDataRequest) returns (GetDataResponse) { - // Takes an address like flyte://v1/proj/development/execid/n2/0/i and return the actual data - option (google.api.http) = { - get: "/api/v1/data" - }; - } -} diff --git a/docs/api/flyteidl/flyteidl/service/external_plugin_service.proto b/docs/api/flyteidl/flyteidl/service/external_plugin_service.proto deleted file mode 100644 index a3035290e2..0000000000 --- a/docs/api/flyteidl/flyteidl/service/external_plugin_service.proto +++ /dev/null @@ -1,79 +0,0 @@ -syntax = "proto3"; -package flyteidl.service; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; -import "flyteidl/core/literals.proto"; -import "flyteidl/core/tasks.proto"; - -// ExternalPluginService defines an RPC Service that allows propeller to send the request to the backend plugin server. -service ExternalPluginService { - // Send a task create request to the backend plugin server. - rpc CreateTask (TaskCreateRequest) returns (TaskCreateResponse){option deprecated = true;}; - // Get job status. - rpc GetTask (TaskGetRequest) returns (TaskGetResponse){option deprecated = true;}; - // Delete the task resource. - rpc DeleteTask (TaskDeleteRequest) returns (TaskDeleteResponse){option deprecated = true;}; -} - -// The state of the execution is used to control its visibility in the UI/CLI. -enum State { - option deprecated = true; - RETRYABLE_FAILURE = 0; - PERMANENT_FAILURE = 1; - PENDING = 2; - RUNNING = 3; - SUCCEEDED = 4; -} - -// Represents a request structure to create task. 
-message TaskCreateRequest { - option deprecated = true; - // The inputs required to start the execution. All required inputs must be - // included in this map. If not required and not provided, defaults apply. - // +optional - core.LiteralMap inputs = 1; - // Template of the task that encapsulates all the metadata of the task. - core.TaskTemplate template = 2; - // Prefix for where task output data will be written. (e.g. s3://my-bucket/randomstring) - string output_prefix = 3; -} - -// Represents a create response structure. -message TaskCreateResponse { - option deprecated = true; - string job_id = 1; -} - -// A message used to fetch a job state from backend plugin server. -message TaskGetRequest { - option deprecated = true; - // A predefined yet extensible Task type identifier. - string task_type = 1; - // The unique id identifying the job. - string job_id = 2; -} - -// Response to get an individual task state. -message TaskGetResponse { - option deprecated = true; - // The state of the execution is used to control its visibility in the UI/CLI. - State state = 1; - // The outputs of the execution. It's typically used by sql task. Flyteplugins service will create a - // Structured dataset pointing to the query result table. - // +optional - core.LiteralMap outputs = 2; -} - -// A message used to delete a task. -message TaskDeleteRequest { - option deprecated = true; - // A predefined yet extensible Task type identifier. - string task_type = 1; - // The unique id identifying the job. - string job_id = 2; -} - -// Response to delete a task. 
-message TaskDeleteResponse { - option deprecated = true; -} diff --git a/docs/api/flyteidl/flyteidl/service/identity.proto b/docs/api/flyteidl/flyteidl/service/identity.proto deleted file mode 100644 index 244bb9aaeb..0000000000 --- a/docs/api/flyteidl/flyteidl/service/identity.proto +++ /dev/null @@ -1,51 +0,0 @@ -syntax = "proto3"; -package flyteidl.service; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; - -import "google/api/annotations.proto"; -import "google/protobuf/struct.proto"; -import "protoc-gen-openapiv2/options/annotations.proto"; - -message UserInfoRequest {} - -// See the OpenID Connect spec at https://openid.net/specs/openid-connect-core-1_0.html#UserInfoResponse for more information. -message UserInfoResponse { - // Locally unique and never reassigned identifier within the Issuer for the End-User, which is intended to be consumed - // by the Client. - string subject = 1; - - // Full name - string name = 2; - - // Shorthand name by which the End-User wishes to be referred to - string preferred_username = 3; - - // Given name(s) or first name(s) - string given_name = 4; - - // Surname(s) or last name(s) - string family_name = 5; - - // Preferred e-mail address - string email = 6; - - // Profile picture URL - string picture = 7; - - // Additional claims - google.protobuf.Struct additional_claims = 8; -} - -// IdentityService defines an RPC Service that interacts with user/app identities. -service IdentityService { - // Retrieves user information about the currently logged in user. - rpc UserInfo (UserInfoRequest) returns (UserInfoResponse) { - option (google.api.http) = { - get: "/me" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieves authenticated identity info." 
- }; - } -} diff --git a/docs/api/flyteidl/flyteidl/service/signal.proto b/docs/api/flyteidl/flyteidl/service/signal.proto deleted file mode 100644 index b1b927979b..0000000000 --- a/docs/api/flyteidl/flyteidl/service/signal.proto +++ /dev/null @@ -1,55 +0,0 @@ -syntax = "proto3"; -package flyteidl.service; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; - -import "google/api/annotations.proto"; -import "flyteidl/admin/signal.proto"; -import "protoc-gen-openapiv2/options/annotations.proto"; - -// SignalService defines an RPC Service that may create, update, and retrieve signal(s). -service SignalService { - // Fetches or creates a :ref:`ref_flyteidl.admin.Signal`. - rpc GetOrCreateSignal (flyteidl.admin.SignalGetOrCreateRequest) returns (flyteidl.admin.Signal) { - // Purposefully left out an HTTP API for this RPC call. This is meant to idempotently retrieve - // a signal, meaning the first call will create the signal and all subsequent calls will - // fetch the existing signal. This is only useful during Flyte Workflow execution and therefore - // is not exposed to mitigate unintended behavior. - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Retrieve a signal, creating it if it does not exist." - }; - } - - // Fetch a list of :ref:`ref_flyteidl.admin.Signal` definitions. - rpc ListSignals (flyteidl.admin.SignalListRequest) returns (flyteidl.admin.SignalList) { - option (google.api.http) = { - get: "/api/v1/signals/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Fetch existing signal definitions matching the input signal id filters." 
- }; - } - - // Sets the value on a :ref:`ref_flyteidl.admin.Signal` definition - rpc SetSignal (flyteidl.admin.SignalSetRequest) returns (flyteidl.admin.SignalSetResponse) { - option (google.api.http) = { - post: "/api/v1/signals" - body: "*" - }; - option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { - description: "Set a signal value." - responses: { - key: "400" - value: { - description: "Returned for bad request that may have failed validation." - } - } - responses: { - key: "409" - value: { - description: "Returned for a request that references an identical entity that has already been registered." - } - } - }; - } -} From 1d358c3727cc4a43d123ef577169202bd58a6d19 Mon Sep 17 00:00:00 2001 From: Niels Bantilan Date: Thu, 3 Oct 2024 14:57:11 -0400 Subject: [PATCH 3/5] update gitignore and unneeded conf Signed-off-by: Niels Bantilan --- .gitignore | 1 - docs/conf.py | 1 - flyteidl/protos/{ => docs}/contributing.md | 0 3 files changed, 2 deletions(-) rename flyteidl/protos/{ => docs}/contributing.md (100%) diff --git a/.gitignore b/.gitignore index ecb45d1558..6b280884f9 100644 --- a/.gitignore +++ b/.gitignore @@ -31,7 +31,6 @@ vendor/ **/bin/ docs/_tags/ docs/api/flytectl -docs/protos docs/api/flytekit docs/api/flyteidl docs/flytesnacks diff --git a/docs/conf.py b/docs/conf.py index a4cc689cc4..416b179adb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -640,7 +640,6 @@ "source": "../flyteidl", "docs_path": "protos", "dest": "api/flyteidl", - # "cmd": ["cp", "../flyteidl/README.md", "api/flyteidl/docs/contributing.md"], "local": True, } ] diff --git a/flyteidl/protos/contributing.md b/flyteidl/protos/docs/contributing.md similarity index 100% rename from flyteidl/protos/contributing.md rename to flyteidl/protos/docs/contributing.md From 0198e917d5cc62eb641420526f73dfc58d81ffb8 Mon Sep 17 00:00:00 2001 From: Niels Bantilan Date: Thu, 3 Oct 2024 15:20:34 -0400 Subject: [PATCH 4/5] add mock DOCSEARCH_API_KEY to docs test ci Signed-off-by: Niels 
Bantilan --- .github/workflows/tests.yml | 3 +++ docs/conf.py | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 1d69466464..0571e60eea 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -69,6 +69,9 @@ jobs: - name: Build the documentation working-directory: ${{ github.workspace }}/flyte shell: bash -el {0} + env: + # this is a fake key for algolia docsearch to make docs build process pass + DOCSEARCH_API_KEY: fake_docsearch_api_key run: | conda activate monodocs-env make docs diff --git a/docs/conf.py b/docs/conf.py index 416b179adb..5581a70265 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -489,9 +489,9 @@ tags_overview_title = "Pages by tags" # Algolia Docsearch credentials -docsearch_app_id = os.getenv("DOCSEARCH_APP_ID") +docsearch_app_id = "WLG0MZB58Q" docsearch_api_key = os.getenv("DOCSEARCH_API_KEY") -docsearch_index_name = os.getenv("DOCSEARCH_INDEX_NAME") +docsearch_index_name = "flyte" # -- Options for intersphinx extension --------------------------------------- From efca412ed2e40dfb5dff2e1fb97b3ed16f543d55 Mon Sep 17 00:00:00 2001 From: Niels Bantilan Date: Thu, 3 Oct 2024 16:39:08 -0400 Subject: [PATCH 5/5] add css styling (#5803) * add css styling Signed-off-by: Niels Bantilan * update logo height Signed-off-by: Niels Bantilan --------- Signed-off-by: Niels Bantilan --- docs/_static/custom.css | 113 ++++++++++++++++++++++++++++++++++++ docs/_static/custom.js | 10 ++++ docs/_templates/layout.html | 6 +- docs/api/index.md | 1 + docs/conf.py | 27 +++++++-- docs/index.md | 77 ++++++++++++------------ docs/user_guide/index.md | 1 - 7 files changed, 189 insertions(+), 46 deletions(-) create mode 100644 docs/_static/custom.css create mode 100644 docs/_static/custom.js diff --git a/docs/_static/custom.css b/docs/_static/custom.css new file mode 100644 index 0000000000..faa3b25f33 --- /dev/null +++ b/docs/_static/custom.css @@ -0,0 +1,113 @@ 
+html[data-theme=light], html[data-theme=dark] { + --pst-color-primary: #7652A2; + --pst-color-link: #7652A2; + --pst-color-link-hover: #7652A2; +} + + +/* Styling for page layout */ +.bd-page-width { + max-width: 100%; +} + +/* style the main title logo */ +.navbar-brand:hover, .navbar-brand:visited:hover { + text-decoration: none; +} + +.navbar-brand img { + height: 70%; +} + +/* +Center the main page content only when the sidebar has the .hide-on-wide +class, which is only on the root page +*/ +.bd-sidebar.hide-on-wide + main.bd-main .bd-content { + justify-content: center; +} + +/* left sidebar nav width */ +.bd-sidebar-primary { + width: 15%; + min-width: 250px; + padding: 1rem; +} + +/* make the scroll bar sleeker */ +.bd-sidebar::-webkit-scrollbar { + width: 3px; + height: 3px; + background: transparent; +} + +.bd-sidebar::-webkit-scrollbar-thumb { + background: var(--pst-color-border); +} + +/* make sure page content fills up the page */ +.bd-main .bd-content { + justify-content: left; +} + +.bd-main .bd-content .bd-article-container { + padding: 2.5rem; + max-width: 75em; +} + +/* aligh navbar items with the sidebar */ +.bd-header .navbar-header-items__start { + width: 15%; + min-width: 250px; +} + +/* styles for the main page subtitle and badges */ +div.sd-card.sd-shadow-sm.subtitle-and-badges { + border: none; + box-shadow: none !important; +} + +div.subtitle-and-badges .sd-card-body { + padding: 0; +} + +div.subtitle-and-badges .sd-card-title { + color: #808080; + font-weight: 350 !important; + font-size: 25px; + padding-top: 10px; + padding-bottom: 10px; +} + +div.subtitle-and-badges a { + text-decoration: none; +} + +div.subtitle-and-badges a img { + margin: 2px 0; +} + +/* update table hover color to neutral color */ +.table tbody tr:hover { + background-color: var(--pst-color-table-inner-border); +} + +/* custom footer style */ +div.custom-footer { + text-align: center; + background-color: var(--pst-color-surface); + padding: 15px 0; + color: 
var(--pst-color-text-muted); +} + +/* anchor link hover style */ +a:hover, +nav.bd-links li>a:active, +nav.bd-links li>a:hover, +.bd-header ul.navbar-nav>li.nav-item>.nav-link:hover { + color: var(--pst-color-primary); +} + +.bd-header ul.navbar-nav>li.nav-item>.nav-link:hover:before { + border-bottom: max(3px, .1875rem, .12em) solid var(--pst-color-primary); +} diff --git a/docs/_static/custom.js b/docs/_static/custom.js new file mode 100644 index 0000000000..573b21e6fd --- /dev/null +++ b/docs/_static/custom.js @@ -0,0 +1,10 @@ +window.addEventListener("DOMContentLoaded", function() { + // Select all elements with class "external" + var externalLinks = document.querySelectorAll("a.external"); + + // Loop through each element with class "external" + externalLinks.forEach(function(link) { + // Set the target attribute to "_blank" + link.setAttribute("target", "_blank"); + }); +}); diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html index a53f62a53f..fa6329208f 100644 --- a/docs/_templates/layout.html +++ b/docs/_templates/layout.html @@ -42,9 +42,11 @@ runllm-keyboard-shortcut="Mod+j" runllm-slack-community-url="https://flyte-org.slack.com/join/shared_invite/zt-2eq2fgs5f-UGUWnMYVB9agervilmlyaw#/shared-invite/email" runllm-name="RunLLM"> - + + {% endblock %} diff --git a/docs/api/index.md b/docs/api/index.md index 0b24d8f157..cde46b40a7 100644 --- a/docs/api/index.md +++ b/docs/api/index.md @@ -2,6 +2,7 @@ ::::{grid} 2 +:gutter: 2 :::{grid-item-card} {octicon}`rocket` Flytekit SDK :link: flytekit/docs_index diff --git a/docs/conf.py b/docs/conf.py index 5581a70265..6b0191c890 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -385,14 +385,31 @@ html_theme_options = { # custom flyteorg pydata theme options - "github_url": "https://github.com/flyteorg/flyte", + # "github_url": "https://github.com/flyteorg/flyte", "logo": { - "text": "Flyte Docs", + "text": "Flyte", }, + "external_links": [ + {"name": "Flyte", "url": "https://flyte.org"}, + ], 
"icon_links": [ { "name": "GitHub", + "icon": "fa-brands fa-github", + "type": "fontawesome", "url": "https://github.com/flyteorg/flyte", + }, + { + "name": "Slack", + "url": "https://slack.flyte.org", + "icon": "fa-brands fa-slack", + "type": "fontawesome", + }, + { + "name": "Flyte", + "url": "https://flyte.org", + "icon": "fa-solid fa-dragon", + "type": "fontawesome", } ], "use_edit_page_button": True, @@ -403,9 +420,9 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = [] -# html_css_files = [] -# html_js_files = [] +html_static_path = ["_static"] +html_css_files = ["custom.css"] +html_js_files = ["custom.js"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. diff --git a/docs/index.md b/docs/index.md index 61bb687131..d5fcfbbb8b 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,54 +1,55 @@ # Welcome to Flyte! -```{eval-rst} -.. raw:: html +````{card} The highly scalable and flexible workflow orchestrator that unifies data, ML and analytics. +:class-card: subtitle-and-badges -

- The highly scalable and flexible workflow orchestrator that unifies data, ML and analytics. -

- -.. image:: https://img.shields.io/badge/Graduate%20Project-Linux%20Foundation-purple?style=for-the-badge - :target: https://lfaidata.foundation/projects/flyte/ - :alt: Linux Foundation - -.. image:: https://img.shields.io/github/stars/flyteorg/flyte?label=github&logo=github&style=for-the-badge - :target: https://github.com/flyteorg/flyte - :alt: GitHub Repo stars +```{image} https://img.shields.io/badge/Graduate%20Project-Linux%20Foundation-purple?style=for-the-badge +:target: https://lfaidata.foundation/projects/flyte/ +:alt: Linux Foundation +``` -.. image:: https://img.shields.io/github/release/flyteorg/flyte.svg?style=for-the-badge&color=blue - :target: https://github.com/flyteorg/flyte/releases/latest - :alt: Flyte Release +```{image} https://img.shields.io/github/stars/flyteorg/flyte?label=github&logo=github&style=for-the-badge +:target: https://github.com/flyteorg/flyte +:alt: GitHub Repo stars +``` -.. image:: https://img.shields.io/github/actions/workflow/status/flyteorg/flyte/tests.yml?label=tests&style=for-the-badge - :target: https://github.com/flyteorg/flyte/actions/workflows/tests.yml - :alt: GitHub Test Status +```{image} https://img.shields.io/github/release/flyteorg/flyte.svg?style=for-the-badge&color=blue +:target: https://github.com/flyteorg/flyte/releases/latest +:alt: Flyte Release +``` -.. image:: https://img.shields.io/github/actions/workflow/status/flyteorg/flyte/sandbox.yml?label=Sandbox%20docker%20image&style=for-the-badge - :target: https://github.com/flyteorg/flyte/actions/workflows/sandbox.yml - :alt: GitHub Sandbox Status +```{image} https://img.shields.io/github/actions/workflow/status/flyteorg/flyte/tests.yml?label=tests&style=for-the-badge +:target: https://github.com/flyteorg/flyte/actions/workflows/tests.yml +:alt: GitHub Test Status +``` -.. 
image:: https://img.shields.io/github/milestones/closed/flyteorg/flyte?style=for-the-badge - :target: https://github.com/flyteorg/flyte/milestones?state=closed - :alt: Completed Milestones +```{image} https://img.shields.io/github/actions/workflow/status/flyteorg/flyte/sandbox.yml?label=Sandbox%20docker%20image&style=for-the-badge +:target: https://github.com/flyteorg/flyte/actions/workflows/sandbox.yml +:alt: GitHub Sandbox Status +``` -.. image:: https://img.shields.io/pypi/dm/flytekit?color=blue&label=flytekit%20downloads&style=for-the-badge&logo=pypi&logoColor=white - :target: https://github.com/flyteorg/flytekit - :alt: Flytekit Downloads +```{image} https://img.shields.io/github/milestones/closed/flyteorg/flyte?style=for-the-badge +:target: https://github.com/flyteorg/flyte/milestones?state=closed +:alt: Completed Milestones +``` -.. image:: https://img.shields.io/badge/Slack-Chat-pink?style=for-the-badge&logo=slack - :target: https://slack.flyte.org - :alt: Flyte Slack +```{image} https://img.shields.io/pypi/dm/flytekit?color=blue&label=flytekit%20downloads&style=for-the-badge&logo=pypi&logoColor=white +:target: https://github.com/flyteorg/flytekit +:alt: Flytekit Downloads +``` -.. image:: https://img.shields.io/badge/LICENSE-Apache2.0-ff69b4.svg?style=for-the-badge - :target: http://www.apache.org/licenses/LICENSE-2.0.html - :alt: License +```{image} https://img.shields.io/badge/Slack-Chat-pink?style=for-the-badge&logo=slack +:target: https://slack.flyte.org +:alt: Flyte Slack +``` -.. |br| raw:: html +```{image} https://img.shields.io/badge/LICENSE-Apache2.0-ff69b4.svg?style=for-the-badge +:target: http://www.apache.org/licenses/LICENSE-2.0.html +:alt: License +``` -
-
+```` -``` [Flyte](https://github.com/flyteorg/flyte) is an open-source, Kubernetes-native workflow orchestrator implemented in [Go](https://go.dev/). It enables highly diff --git a/docs/user_guide/index.md b/docs/user_guide/index.md index 44c975f6ae..ae439c8760 100644 --- a/docs/user_guide/index.md +++ b/docs/user_guide/index.md @@ -94,7 +94,6 @@ concepts/component_architecture/index :hidden: flytekit-java -unionml pterodactyl latch sdk ```