From 94070e92d16a728101bf5566917a7eadd883bdcd Mon Sep 17 00:00:00 2001 From: Haytham Abuelfutuh Date: Thu, 31 Mar 2022 16:46:28 -0700 Subject: [PATCH] Rename DataProxy to DataProxyService for consistency (#273) * Rename DataProxy to DataProxyService for consistency Signed-off-by: Haytham Abuelfutuh * Update client go Signed-off-by: Haytham Abuelfutuh * bump Signed-off-by: Haytham Abuelfutuh --- .../flyte/golang_support_tools/go.mod | 10 +- .../flyte/golang_support_tools/go.sum | 19 +- flyteidl/clients/go/admin/client.go | 8 +- .../clients/go/admin/mocks/DataProxyServer.go | 56 - ...oxyClient.go => DataProxyServiceClient.go} | 20 +- .../go/admin/mocks/DataProxyServiceServer.go | 56 + .../flyteidl/service/dataproxy.grpc.pb.cc | 38 +- .../flyteidl/service/dataproxy.grpc.pb.h | 12 +- .../pb-cpp/flyteidl/service/dataproxy.pb.cc | 20 +- .../pb-go/flyteidl/service/dataproxy.pb.go | 108 +- .../pb-go/flyteidl/service/dataproxy.pb.gw.go | 34 +- .../flyteidl/service/dataproxy.swagger.json | 4 +- .../pb-java/flyteidl/service/Dataproxy.java | 18 +- flyteidl/gen/pb-js/flyteidl.d.ts | 16 +- flyteidl/gen/pb-js/flyteidl.js | 32 +- .../flyteidl/service/dataproxy_pb2.py | 16 +- .../flyteidl/service/dataproxy_pb2_grpc.py | 16 +- flyteidl/protos/docs/admin/admin.rst | 3742 ----------------- flyteidl/protos/docs/core/core.rst | 3656 ---------------- .../protos/docs/datacatalog/datacatalog.rst | 1237 ------ flyteidl/protos/docs/event/event.rst | 693 --- .../protos/flyteidl/service/dataproxy.proto | 6 +- 22 files changed, 249 insertions(+), 9568 deletions(-) delete mode 100644 flyteidl/clients/go/admin/mocks/DataProxyServer.go rename flyteidl/clients/go/admin/mocks/{DataProxyClient.go => DataProxyServiceClient.go} (52%) create mode 100644 flyteidl/clients/go/admin/mocks/DataProxyServiceServer.go delete mode 100644 flyteidl/protos/docs/admin/admin.rst delete mode 100644 flyteidl/protos/docs/core/core.rst delete mode 100644 flyteidl/protos/docs/datacatalog/datacatalog.rst delete mode 100644 
flyteidl/protos/docs/event/event.rst diff --git a/flyteidl/boilerplate/flyte/golang_support_tools/go.mod b/flyteidl/boilerplate/flyte/golang_support_tools/go.mod index 048658863..441224926 100644 --- a/flyteidl/boilerplate/flyte/golang_support_tools/go.mod +++ b/flyteidl/boilerplate/flyte/golang_support_tools/go.mod @@ -163,15 +163,15 @@ require ( github.com/ultraware/whitespace v0.0.4 // indirect github.com/uudashr/gocognit v1.0.1 // indirect go.opencensus.io v0.22.6 // indirect - golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad // indirect + golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 // indirect golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 // indirect - golang.org/x/mod v0.4.1 // indirect - golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d // indirect + golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect + golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f // indirect golang.org/x/oauth2 v0.0.0-20210126194326-f9ce19ea3013 // indirect - golang.org/x/sys v0.0.0-20210423082822-04245dca01da // indirect + golang.org/x/sys v0.0.0-20211019181941-9d821ace8654 // indirect golang.org/x/text v0.3.7 // indirect golang.org/x/time v0.0.0-20201208040808-7e3f01d25324 // indirect - golang.org/x/tools v0.1.0 // indirect + golang.org/x/tools v0.1.10 // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect google.golang.org/api v0.38.0 // indirect google.golang.org/appengine v1.6.7 // indirect diff --git a/flyteidl/boilerplate/flyte/golang_support_tools/go.sum b/flyteidl/boilerplate/flyte/golang_support_tools/go.sum index 92515a8cf..755f8cf6e 100644 --- a/flyteidl/boilerplate/flyte/golang_support_tools/go.sum +++ b/flyteidl/boilerplate/flyte/golang_support_tools/go.sum @@ -806,6 +806,7 @@ github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod 
h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= @@ -839,8 +840,9 @@ golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad h1:DN0cp81fZ3njFcrLCytUHRSUkqBjfTo4Tx9RJTWs0EY= golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 h1:7I4JAnoQBe7ZtJcBaYHi5UtiO8tQHbUSXxL+pnGRANg= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -874,8 +876,9 @@ golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod 
h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1 h1:Kvvh58BN8Y9/lBi7hTekvtMpm07eUZ0ck5pRHpsMWrY= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 h1:kQgndtyPBW/JIYERgdxfwMYh3AVStj88WQTlNDi2a+o= +golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -921,9 +924,11 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d h1:20cMwl2fHAzkJMEA+8J4JgqBQcQGzbisXo31MIeenXI= golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f h1:OfiFi4JbukWwe3lzw+xunroH1mnC1e2Gy5cxNJApiSY= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod 
h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -945,6 +950,7 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1002,8 +1008,10 @@ golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201214210602-f9fddec55a1e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da h1:b3NXsE2LusjYGGjL5bxEVZZORm/YEFFrWFjR8eFrw/c= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654 h1:id054HUawV2/6IGm2IV8KZQjqtwAOo2CYlOToYqa0d0= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term 
v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1116,8 +1124,9 @@ golang.org/x/tools v0.0.0-20210104081019-d8d6ddbec6ee/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.10 h1:QjFRCZxdOhBJ/UNgnBZLbNV13DlbnK0quyivTnXJM20= +golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/flyteidl/clients/go/admin/client.go b/flyteidl/clients/go/admin/client.go index d6f35af80..05cfd907f 100644 --- a/flyteidl/clients/go/admin/client.go +++ b/flyteidl/clients/go/admin/client.go @@ -29,7 +29,7 @@ type Clientset struct { authMetadataServiceClient service.AuthMetadataServiceClient healthServiceClient grpc_health_v1.HealthClient identityServiceClient service.IdentityServiceClient - dataProxyServiceClient service.DataProxyClient + dataProxyServiceClient service.DataProxyServiceClient authOpt grpc.DialOption } @@ -56,7 +56,7 @@ func (c Clientset) IdentityClient() 
service.IdentityServiceClient { return c.identityServiceClient } -func (c Clientset) DataProxyClient() service.DataProxyClient { +func (c Clientset) DataProxyClient() service.DataProxyServiceClient { return c.dataProxyServiceClient } @@ -202,7 +202,7 @@ func initializeClients(ctx context.Context, cfg *Config, tokenCache pkce.TokenCa cs.authMetadataServiceClient = service.NewAuthMetadataServiceClient(adminConnection) cs.identityServiceClient = service.NewIdentityServiceClient(adminConnection) cs.healthServiceClient = grpc_health_v1.NewHealthClient(adminConnection) - cs.dataProxyServiceClient = service.NewDataProxyClient(adminConnection) + cs.dataProxyServiceClient = service.NewDataProxyServiceClient(adminConnection) if authOpt != nil { cs.authOpt = authOpt } @@ -231,7 +231,7 @@ func InitializeMockClientset() *Clientset { adminServiceClient: &mocks.AdminServiceClient{}, authMetadataServiceClient: &mocks.AuthMetadataServiceClient{}, identityServiceClient: &mocks.IdentityServiceClient{}, - dataProxyServiceClient: &mocks.DataProxyClient{}, + dataProxyServiceClient: &mocks.DataProxyServiceClient{}, healthServiceClient: grpc_health_v1.NewHealthClient(nil), } } diff --git a/flyteidl/clients/go/admin/mocks/DataProxyServer.go b/flyteidl/clients/go/admin/mocks/DataProxyServer.go deleted file mode 100644 index 0219d690e..000000000 --- a/flyteidl/clients/go/admin/mocks/DataProxyServer.go +++ /dev/null @@ -1,56 +0,0 @@ -// Code generated by mockery v1.0.1. DO NOT EDIT. 
- -package mocks - -import ( - context "context" - - service "github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/service" - mock "github.com/stretchr/testify/mock" -) - -// DataProxyServer is an autogenerated mock type for the DataProxyServer type -type DataProxyServer struct { - mock.Mock -} - -type DataProxyServer_CreateUploadLocation struct { - *mock.Call -} - -func (_m DataProxyServer_CreateUploadLocation) Return(_a0 *service.CreateUploadLocationResponse, _a1 error) *DataProxyServer_CreateUploadLocation { - return &DataProxyServer_CreateUploadLocation{Call: _m.Call.Return(_a0, _a1)} -} - -func (_m *DataProxyServer) OnCreateUploadLocation(_a0 context.Context, _a1 *service.CreateUploadLocationRequest) *DataProxyServer_CreateUploadLocation { - c := _m.On("CreateUploadLocation", _a0, _a1) - return &DataProxyServer_CreateUploadLocation{Call: c} -} - -func (_m *DataProxyServer) OnCreateUploadLocationMatch(matchers ...interface{}) *DataProxyServer_CreateUploadLocation { - c := _m.On("CreateUploadLocation", matchers...) 
- return &DataProxyServer_CreateUploadLocation{Call: c} -} - -// CreateUploadLocation provides a mock function with given fields: _a0, _a1 -func (_m *DataProxyServer) CreateUploadLocation(_a0 context.Context, _a1 *service.CreateUploadLocationRequest) (*service.CreateUploadLocationResponse, error) { - ret := _m.Called(_a0, _a1) - - var r0 *service.CreateUploadLocationResponse - if rf, ok := ret.Get(0).(func(context.Context, *service.CreateUploadLocationRequest) *service.CreateUploadLocationResponse); ok { - r0 = rf(_a0, _a1) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*service.CreateUploadLocationResponse) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, *service.CreateUploadLocationRequest) error); ok { - r1 = rf(_a0, _a1) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} diff --git a/flyteidl/clients/go/admin/mocks/DataProxyClient.go b/flyteidl/clients/go/admin/mocks/DataProxyServiceClient.go similarity index 52% rename from flyteidl/clients/go/admin/mocks/DataProxyClient.go rename to flyteidl/clients/go/admin/mocks/DataProxyServiceClient.go index 71573872a..5897a4bee 100644 --- a/flyteidl/clients/go/admin/mocks/DataProxyClient.go +++ b/flyteidl/clients/go/admin/mocks/DataProxyServiceClient.go @@ -12,31 +12,31 @@ import ( service "github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/service" ) -// DataProxyClient is an autogenerated mock type for the DataProxyClient type -type DataProxyClient struct { +// DataProxyServiceClient is an autogenerated mock type for the DataProxyServiceClient type +type DataProxyServiceClient struct { mock.Mock } -type DataProxyClient_CreateUploadLocation struct { +type DataProxyServiceClient_CreateUploadLocation struct { *mock.Call } -func (_m DataProxyClient_CreateUploadLocation) Return(_a0 *service.CreateUploadLocationResponse, _a1 error) *DataProxyClient_CreateUploadLocation { - return &DataProxyClient_CreateUploadLocation{Call: _m.Call.Return(_a0, _a1)} +func (_m 
DataProxyServiceClient_CreateUploadLocation) Return(_a0 *service.CreateUploadLocationResponse, _a1 error) *DataProxyServiceClient_CreateUploadLocation { + return &DataProxyServiceClient_CreateUploadLocation{Call: _m.Call.Return(_a0, _a1)} } -func (_m *DataProxyClient) OnCreateUploadLocation(ctx context.Context, in *service.CreateUploadLocationRequest, opts ...grpc.CallOption) *DataProxyClient_CreateUploadLocation { +func (_m *DataProxyServiceClient) OnCreateUploadLocation(ctx context.Context, in *service.CreateUploadLocationRequest, opts ...grpc.CallOption) *DataProxyServiceClient_CreateUploadLocation { c := _m.On("CreateUploadLocation", ctx, in, opts) - return &DataProxyClient_CreateUploadLocation{Call: c} + return &DataProxyServiceClient_CreateUploadLocation{Call: c} } -func (_m *DataProxyClient) OnCreateUploadLocationMatch(matchers ...interface{}) *DataProxyClient_CreateUploadLocation { +func (_m *DataProxyServiceClient) OnCreateUploadLocationMatch(matchers ...interface{}) *DataProxyServiceClient_CreateUploadLocation { c := _m.On("CreateUploadLocation", matchers...) 
- return &DataProxyClient_CreateUploadLocation{Call: c} + return &DataProxyServiceClient_CreateUploadLocation{Call: c} } // CreateUploadLocation provides a mock function with given fields: ctx, in, opts -func (_m *DataProxyClient) CreateUploadLocation(ctx context.Context, in *service.CreateUploadLocationRequest, opts ...grpc.CallOption) (*service.CreateUploadLocationResponse, error) { +func (_m *DataProxyServiceClient) CreateUploadLocation(ctx context.Context, in *service.CreateUploadLocationRequest, opts ...grpc.CallOption) (*service.CreateUploadLocationResponse, error) { _va := make([]interface{}, len(opts)) for _i := range opts { _va[_i] = opts[_i] diff --git a/flyteidl/clients/go/admin/mocks/DataProxyServiceServer.go b/flyteidl/clients/go/admin/mocks/DataProxyServiceServer.go new file mode 100644 index 000000000..cd7c01afb --- /dev/null +++ b/flyteidl/clients/go/admin/mocks/DataProxyServiceServer.go @@ -0,0 +1,56 @@ +// Code generated by mockery v1.0.1. DO NOT EDIT. + +package mocks + +import ( + context "context" + + service "github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/service" + mock "github.com/stretchr/testify/mock" +) + +// DataProxyServiceServer is an autogenerated mock type for the DataProxyServiceServer type +type DataProxyServiceServer struct { + mock.Mock +} + +type DataProxyServiceServer_CreateUploadLocation struct { + *mock.Call +} + +func (_m DataProxyServiceServer_CreateUploadLocation) Return(_a0 *service.CreateUploadLocationResponse, _a1 error) *DataProxyServiceServer_CreateUploadLocation { + return &DataProxyServiceServer_CreateUploadLocation{Call: _m.Call.Return(_a0, _a1)} +} + +func (_m *DataProxyServiceServer) OnCreateUploadLocation(_a0 context.Context, _a1 *service.CreateUploadLocationRequest) *DataProxyServiceServer_CreateUploadLocation { + c := _m.On("CreateUploadLocation", _a0, _a1) + return &DataProxyServiceServer_CreateUploadLocation{Call: c} +} + +func (_m *DataProxyServiceServer) OnCreateUploadLocationMatch(matchers 
...interface{}) *DataProxyServiceServer_CreateUploadLocation { + c := _m.On("CreateUploadLocation", matchers...) + return &DataProxyServiceServer_CreateUploadLocation{Call: c} +} + +// CreateUploadLocation provides a mock function with given fields: _a0, _a1 +func (_m *DataProxyServiceServer) CreateUploadLocation(_a0 context.Context, _a1 *service.CreateUploadLocationRequest) (*service.CreateUploadLocationResponse, error) { + ret := _m.Called(_a0, _a1) + + var r0 *service.CreateUploadLocationResponse + if rf, ok := ret.Get(0).(func(context.Context, *service.CreateUploadLocationRequest) *service.CreateUploadLocationResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*service.CreateUploadLocationResponse) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *service.CreateUploadLocationRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} diff --git a/flyteidl/gen/pb-cpp/flyteidl/service/dataproxy.grpc.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/service/dataproxy.grpc.pb.cc index d4bb4d473..4ed58d786 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/service/dataproxy.grpc.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/service/dataproxy.grpc.pb.cc @@ -19,60 +19,60 @@ namespace flyteidl { namespace service { -static const char* DataProxy_method_names[] = { - "/flyteidl.service.DataProxy/CreateUploadLocation", +static const char* DataProxyService_method_names[] = { + "/flyteidl.service.DataProxyService/CreateUploadLocation", }; -std::unique_ptr< DataProxy::Stub> DataProxy::NewStub(const std::shared_ptr< ::grpc::ChannelInterface>& channel, const ::grpc::StubOptions& options) { +std::unique_ptr< DataProxyService::Stub> DataProxyService::NewStub(const std::shared_ptr< ::grpc::ChannelInterface>& channel, const ::grpc::StubOptions& options) { (void)options; - std::unique_ptr< DataProxy::Stub> stub(new DataProxy::Stub(channel)); + std::unique_ptr< DataProxyService::Stub> stub(new 
DataProxyService::Stub(channel)); return stub; } -DataProxy::Stub::Stub(const std::shared_ptr< ::grpc::ChannelInterface>& channel) - : channel_(channel), rpcmethod_CreateUploadLocation_(DataProxy_method_names[0], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) +DataProxyService::Stub::Stub(const std::shared_ptr< ::grpc::ChannelInterface>& channel) + : channel_(channel), rpcmethod_CreateUploadLocation_(DataProxyService_method_names[0], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) {} -::grpc::Status DataProxy::Stub::CreateUploadLocation(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest& request, ::flyteidl::service::CreateUploadLocationResponse* response) { +::grpc::Status DataProxyService::Stub::CreateUploadLocation(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest& request, ::flyteidl::service::CreateUploadLocationResponse* response) { return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_CreateUploadLocation_, context, request, response); } -void DataProxy::Stub::experimental_async::CreateUploadLocation(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest* request, ::flyteidl::service::CreateUploadLocationResponse* response, std::function f) { +void DataProxyService::Stub::experimental_async::CreateUploadLocation(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest* request, ::flyteidl::service::CreateUploadLocationResponse* response, std::function f) { ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_CreateUploadLocation_, context, request, response, std::move(f)); } -void DataProxy::Stub::experimental_async::CreateUploadLocation(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::service::CreateUploadLocationResponse* response, std::function f) { +void DataProxyService::Stub::experimental_async::CreateUploadLocation(::grpc::ClientContext* context, 
const ::grpc::ByteBuffer* request, ::flyteidl::service::CreateUploadLocationResponse* response, std::function f) { ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_CreateUploadLocation_, context, request, response, std::move(f)); } -void DataProxy::Stub::experimental_async::CreateUploadLocation(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest* request, ::flyteidl::service::CreateUploadLocationResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { +void DataProxyService::Stub::experimental_async::CreateUploadLocation(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest* request, ::flyteidl::service::CreateUploadLocationResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_CreateUploadLocation_, context, request, response, reactor); } -void DataProxy::Stub::experimental_async::CreateUploadLocation(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::service::CreateUploadLocationResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { +void DataProxyService::Stub::experimental_async::CreateUploadLocation(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::service::CreateUploadLocationResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_CreateUploadLocation_, context, request, response, reactor); } -::grpc::ClientAsyncResponseReader< ::flyteidl::service::CreateUploadLocationResponse>* DataProxy::Stub::AsyncCreateUploadLocationRaw(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest& request, ::grpc::CompletionQueue* cq) { +::grpc::ClientAsyncResponseReader< ::flyteidl::service::CreateUploadLocationResponse>* 
DataProxyService::Stub::AsyncCreateUploadLocationRaw(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest& request, ::grpc::CompletionQueue* cq) { return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::service::CreateUploadLocationResponse>::Create(channel_.get(), cq, rpcmethod_CreateUploadLocation_, context, request, true); } -::grpc::ClientAsyncResponseReader< ::flyteidl::service::CreateUploadLocationResponse>* DataProxy::Stub::PrepareAsyncCreateUploadLocationRaw(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest& request, ::grpc::CompletionQueue* cq) { +::grpc::ClientAsyncResponseReader< ::flyteidl::service::CreateUploadLocationResponse>* DataProxyService::Stub::PrepareAsyncCreateUploadLocationRaw(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest& request, ::grpc::CompletionQueue* cq) { return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::service::CreateUploadLocationResponse>::Create(channel_.get(), cq, rpcmethod_CreateUploadLocation_, context, request, false); } -DataProxy::Service::Service() { +DataProxyService::Service::Service() { AddMethod(new ::grpc::internal::RpcServiceMethod( - DataProxy_method_names[0], + DataProxyService_method_names[0], ::grpc::internal::RpcMethod::NORMAL_RPC, - new ::grpc::internal::RpcMethodHandler< DataProxy::Service, ::flyteidl::service::CreateUploadLocationRequest, ::flyteidl::service::CreateUploadLocationResponse>( - std::mem_fn(&DataProxy::Service::CreateUploadLocation), this))); + new ::grpc::internal::RpcMethodHandler< DataProxyService::Service, ::flyteidl::service::CreateUploadLocationRequest, ::flyteidl::service::CreateUploadLocationResponse>( + std::mem_fn(&DataProxyService::Service::CreateUploadLocation), this))); } -DataProxy::Service::~Service() { +DataProxyService::Service::~Service() { } -::grpc::Status DataProxy::Service::CreateUploadLocation(::grpc::ServerContext* context, 
const ::flyteidl::service::CreateUploadLocationRequest* request, ::flyteidl::service::CreateUploadLocationResponse* response) { +::grpc::Status DataProxyService::Service::CreateUploadLocation(::grpc::ServerContext* context, const ::flyteidl::service::CreateUploadLocationRequest* request, ::flyteidl::service::CreateUploadLocationResponse* response) { (void) context; (void) request; (void) response; diff --git a/flyteidl/gen/pb-cpp/flyteidl/service/dataproxy.grpc.pb.h b/flyteidl/gen/pb-cpp/flyteidl/service/dataproxy.grpc.pb.h index 326817dd8..5c01f48b8 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/service/dataproxy.grpc.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/service/dataproxy.grpc.pb.h @@ -40,16 +40,16 @@ class ServerContext; namespace flyteidl { namespace service { -// DataProxy defines an RPC Service that allows access to user-data in a controlled manner. -class DataProxy final { +// DataProxyService defines an RPC Service that allows access to user-data in a controlled manner. +class DataProxyService final { public: static constexpr char const* service_full_name() { - return "flyteidl.service.DataProxy"; + return "flyteidl.service.DataProxyService"; } class StubInterface { public: virtual ~StubInterface() {} - // Retrieves user information about the currently logged in user. + // CreateUploadLocation creates a signed url to upload artifacts to for a given project/domain. 
virtual ::grpc::Status CreateUploadLocation(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest& request, ::flyteidl::service::CreateUploadLocationResponse* response) = 0; std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::service::CreateUploadLocationResponse>> AsyncCreateUploadLocation(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest& request, ::grpc::CompletionQueue* cq) { return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::service::CreateUploadLocationResponse>>(AsyncCreateUploadLocationRaw(context, request, cq)); @@ -60,7 +60,7 @@ class DataProxy final { class experimental_async_interface { public: virtual ~experimental_async_interface() {} - // Retrieves user information about the currently logged in user. + // CreateUploadLocation creates a signed url to upload artifacts to for a given project/domain. virtual void CreateUploadLocation(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest* request, ::flyteidl::service::CreateUploadLocationResponse* response, std::function) = 0; virtual void CreateUploadLocation(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::service::CreateUploadLocationResponse* response, std::function) = 0; virtual void CreateUploadLocation(::grpc::ClientContext* context, const ::flyteidl::service::CreateUploadLocationRequest* request, ::flyteidl::service::CreateUploadLocationResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; @@ -109,7 +109,7 @@ class DataProxy final { public: Service(); virtual ~Service(); - // Retrieves user information about the currently logged in user. + // CreateUploadLocation creates a signed url to upload artifacts to for a given project/domain. 
virtual ::grpc::Status CreateUploadLocation(::grpc::ServerContext* context, const ::flyteidl::service::CreateUploadLocationRequest* request, ::flyteidl::service::CreateUploadLocationResponse* response); }; template diff --git a/flyteidl/gen/pb-cpp/flyteidl/service/dataproxy.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/service/dataproxy.pb.cc index 246941b1b..ea5a8d3d1 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/service/dataproxy.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/service/dataproxy.pb.cc @@ -116,20 +116,20 @@ const char descriptor_table_protodef_flyteidl_2fservice_2fdataproxy_2eproto[] = "ateUploadLocationRequest\022\017\n\007project\030\001 \001(" "\t\022\016\n\006domain\030\002 \001(\t\022\016\n\006suffix\030\003 \001(\t\022-\n\nexp" "ires_in\030\004 \001(\0132\031.google.protobuf.Duration" - "2\376\001\n\tDataProxy\022\360\001\n\024CreateUploadLocation\022" - "-.flyteidl.service.CreateUploadLocationR" - "equest\032..flyteidl.service.CreateUploadLo" - "cationResponse\"y\202\323\344\223\002#\"\036/api/v1/dataprox" - "y/artifact_urn:\001*\222AM\032KCreates a write-on" - "ly http location that is accessible for " - "tasks at runtime.B9Z7github.com/flyteorg" - "/flyteidl/gen/pb-go/flyteidl/serviceb\006pr" - "oto3" + "2\205\002\n\020DataProxyService\022\360\001\n\024CreateUploadLo" + "cation\022-.flyteidl.service.CreateUploadLo" + "cationRequest\032..flyteidl.service.CreateU" + "ploadLocationResponse\"y\202\323\344\223\002#\"\036/api/v1/d" + "ataproxy/artifact_urn:\001*\222AM\032KCreates a w" + "rite-only http location that is accessib" + "le for tasks at runtime.B9Z7github.com/f" + "lyteorg/flyteidl/gen/pb-go/flyteidl/serv" + "iceb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fservice_2fdataproxy_2eproto = { false, InitDefaults_flyteidl_2fservice_2fdataproxy_2eproto, descriptor_table_protodef_flyteidl_2fservice_2fdataproxy_2eproto, - "flyteidl/service/dataproxy.proto", 
&assign_descriptors_table_flyteidl_2fservice_2fdataproxy_2eproto, 764, + "flyteidl/service/dataproxy.proto", &assign_descriptors_table_flyteidl_2fservice_2fdataproxy_2eproto, 771, }; void AddDescriptors_flyteidl_2fservice_2fdataproxy_2eproto() { diff --git a/flyteidl/gen/pb-go/flyteidl/service/dataproxy.pb.go b/flyteidl/gen/pb-go/flyteidl/service/dataproxy.pb.go index fdc8900af..4a86b5035 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/dataproxy.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/service/dataproxy.pb.go @@ -167,37 +167,37 @@ func init() { func init() { proto.RegisterFile("flyteidl/service/dataproxy.proto", fileDescriptor_bffb71366d75dab0) } var fileDescriptor_bffb71366d75dab0 = []byte{ - // 467 bytes of a gzipped FileDescriptorProto + // 471 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xc1, 0x6e, 0x13, 0x31, - 0x10, 0x86, 0xb5, 0x05, 0x15, 0xc5, 0x5c, 0xd0, 0x0a, 0xa1, 0x10, 0x4a, 0xb1, 0xc2, 0xa5, 0x42, - 0xc4, 0x16, 0xe5, 0x00, 0xe5, 0x56, 0xe8, 0x05, 0x01, 0x12, 0x8a, 0xe8, 0x85, 0x4b, 0x35, 0xbb, - 0x3b, 0xeb, 0x18, 0x36, 0xb6, 0xb1, 0x67, 0xd3, 0xe4, 0xca, 0x23, 0xc0, 0x81, 0x33, 0xcf, 0xc4, - 0x2b, 0xf0, 0x00, 0x3c, 0x01, 0x42, 0xeb, 0x75, 0x5a, 0x29, 0x44, 0x88, 0xd3, 0x6a, 0xe6, 0xff, - 0xbc, 0xfa, 0x6c, 0x8f, 0x19, 0xaf, 0x9b, 0x15, 0xa1, 0xae, 0x1a, 0x19, 0xd0, 0x2f, 0x74, 0x89, - 0xb2, 0x02, 0x02, 0xe7, 0xed, 0x72, 0x25, 0x9c, 0xb7, 0x64, 0xf3, 0x1b, 0x6b, 0x42, 0x24, 0x62, - 0xb4, 0xa7, 0xac, 0x55, 0x0d, 0x4a, 0x70, 0x5a, 0x82, 0x31, 0x96, 0x80, 0xb4, 0x35, 0xa1, 0xe7, - 0x47, 0x0f, 0xe3, 0xa7, 0x9c, 0x28, 0x34, 0x93, 0x70, 0x0e, 0x4a, 0xa1, 0x97, 0xd6, 0x45, 0x62, - 0x0b, 0xbd, 0x9f, 0xfe, 0x15, 0xab, 0xa2, 0xad, 0x65, 0xd5, 0xfa, 0x08, 0xa4, 0xfc, 0xde, 0x66, - 0x4e, 0x7a, 0x8e, 0x81, 0x60, 0xee, 0x7a, 0x60, 0xfc, 0x2d, 0x63, 0x7b, 0x2f, 0x3c, 0x02, 0xe1, - 0xa9, 0x6b, 0x2c, 0x54, 0xaf, 0x6d, 0x19, 0xd7, 0x4f, 0x31, 0x38, 0x6b, 0x02, 0xe6, 0x77, 0x19, - 0x0b, 0x5a, 0x19, 0xac, 
0xce, 0x5a, 0xdf, 0x0c, 0x33, 0x9e, 0x1d, 0x0c, 0xa6, 0x83, 0xbe, 0x73, - 0xea, 0x9b, 0x2e, 0x36, 0x40, 0x7a, 0x81, 0x31, 0xde, 0xe9, 0xe3, 0xbe, 0xd3, 0xc5, 0x47, 0x8c, - 0xe1, 0xd2, 0x69, 0x8f, 0xe1, 0x0c, 0x68, 0x78, 0x85, 0x67, 0x07, 0xd7, 0x0f, 0x47, 0xa2, 0x97, - 0x12, 0x6b, 0x29, 0xf1, 0x6e, 0x2d, 0x35, 0x1d, 0x24, 0xfa, 0x98, 0xc6, 0xdf, 0x33, 0x76, 0x67, - 0xbb, 0xd9, 0xa7, 0x16, 0x03, 0xe5, 0x43, 0x76, 0xcd, 0x79, 0xfb, 0x01, 0x4b, 0x4a, 0x56, 0xeb, - 0x32, 0xbf, 0xc5, 0x76, 0x2b, 0x3b, 0x07, 0x6d, 0x92, 0x4f, 0xaa, 0xba, 0x7e, 0x68, 0xeb, 0x5a, - 0x2f, 0xa3, 0xc8, 0x60, 0x9a, 0xaa, 0xfc, 0xe9, 0xa5, 0xa4, 0x36, 0xc3, 0xab, 0x51, 0xf2, 0xf6, - 0x5f, 0x92, 0x27, 0xe9, 0x64, 0x2f, 0x1c, 0x5f, 0x9a, 0xc3, 0xdf, 0x19, 0x1b, 0x9c, 0x00, 0xc1, - 0xdb, 0xee, 0xc2, 0xf3, 0x5f, 0x19, 0xbb, 0xb9, 0xcd, 0x38, 0x9f, 0x88, 0xcd, 0x21, 0x10, 0xff, - 0xd8, 0xd9, 0x48, 0xfc, 0x2f, 0xde, 0x5f, 0xd1, 0x78, 0xf5, 0xe5, 0xf8, 0xcd, 0xe8, 0x55, 0x8f, - 0x04, 0x0e, 0xfc, 0xdc, 0x6b, 0xc2, 0x89, 0x35, 0xcd, 0x8a, 0xcf, 0x88, 0x1c, 0x6f, 0xd2, 0x02, - 0x4e, 0x33, 0x20, 0xae, 0x03, 0x87, 0xb2, 0xc4, 0x10, 0x74, 0xd1, 0x20, 0xaf, 0xad, 0xe7, 0x04, - 0xe1, 0x63, 0xe0, 0x40, 0xdc, 0xb7, 0xa6, 0x9b, 0x0f, 0xf1, 0xf9, 0xc7, 0xcf, 0xaf, 0x3b, 0xf7, - 0xc7, 0xfb, 0x71, 0x44, 0x17, 0x8f, 0x2e, 0x67, 0x5a, 0x82, 0x27, 0x5d, 0x43, 0x49, 0x67, 0xad, - 0x37, 0xcf, 0xb2, 0x07, 0xcf, 0x8f, 0xde, 0x3f, 0x51, 0x9a, 0x66, 0x6d, 0x21, 0x4a, 0x3b, 0x97, - 0x51, 0xdb, 0x7a, 0x25, 0x2f, 0x5e, 0x85, 0x42, 0x23, 0x5d, 0x31, 0x51, 0x56, 0x6e, 0x3e, 0x94, - 0x62, 0x37, 0x9e, 0xec, 0xe3, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x53, 0xe9, 0xf7, 0x55, 0x43, - 0x03, 0x00, 0x00, + 0x10, 0x86, 0xb5, 0x05, 0x15, 0xc5, 0x5c, 0xaa, 0x15, 0x42, 0x21, 0x94, 0x62, 0x85, 0x4b, 0x85, + 0x88, 0x2d, 0xca, 0x01, 0xca, 0xad, 0xd0, 0x0b, 0x02, 0x24, 0x14, 0xe8, 0x85, 0x4b, 0x34, 0xd9, + 0x9d, 0x75, 0x0c, 0x1b, 0xdb, 0xd8, 0xb3, 0x69, 0x72, 0x45, 0xe2, 0x05, 0xe0, 0xc0, 0x99, 0x67, + 0xe2, 0x15, 0x78, 0x00, 0x1e, 0x01, 0xad, 
0xd7, 0x69, 0xa5, 0x10, 0x21, 0x4e, 0xd1, 0xcc, 0xff, + 0x39, 0xfa, 0xd6, 0x33, 0x66, 0xbc, 0xaa, 0x57, 0x84, 0xba, 0xac, 0x65, 0x40, 0xbf, 0xd0, 0x05, + 0xca, 0x12, 0x08, 0x9c, 0xb7, 0xcb, 0x95, 0x70, 0xde, 0x92, 0xcd, 0xf7, 0xd6, 0x84, 0x48, 0xc4, + 0x60, 0x5f, 0x59, 0xab, 0x6a, 0x94, 0xe0, 0xb4, 0x04, 0x63, 0x2c, 0x01, 0x69, 0x6b, 0x42, 0xc7, + 0x0f, 0x1e, 0xc4, 0x9f, 0x62, 0xa4, 0xd0, 0x8c, 0xc2, 0x39, 0x28, 0x85, 0x5e, 0x5a, 0x17, 0x89, + 0x2d, 0xf4, 0x41, 0xfa, 0xaf, 0x58, 0x4d, 0x9b, 0x4a, 0x96, 0x8d, 0x8f, 0x40, 0xca, 0xef, 0x6e, + 0xe6, 0xa4, 0xe7, 0x18, 0x08, 0xe6, 0xae, 0x03, 0x86, 0xdf, 0x33, 0xb6, 0xff, 0xdc, 0x23, 0x10, + 0x9e, 0xb9, 0xda, 0x42, 0xf9, 0xca, 0x16, 0xf1, 0xfc, 0x18, 0x83, 0xb3, 0x26, 0x60, 0x7e, 0x87, + 0xb1, 0xa0, 0x95, 0xc1, 0x72, 0xd2, 0xf8, 0xba, 0x9f, 0xf1, 0xec, 0xb0, 0x37, 0xee, 0x75, 0x9d, + 0x33, 0x5f, 0xb7, 0xb1, 0x01, 0xd2, 0x0b, 0x8c, 0xf1, 0x4e, 0x17, 0x77, 0x9d, 0x36, 0x3e, 0x66, + 0x0c, 0x97, 0x4e, 0x7b, 0x0c, 0x13, 0xa0, 0xfe, 0x15, 0x9e, 0x1d, 0x5e, 0x3f, 0x1a, 0x88, 0x4e, + 0x4a, 0xac, 0xa5, 0xc4, 0xbb, 0xb5, 0xd4, 0xb8, 0x97, 0xe8, 0x13, 0x1a, 0xfe, 0xc8, 0xd8, 0xed, + 0xed, 0x66, 0x9f, 0x1a, 0x0c, 0x94, 0xf7, 0xd9, 0x35, 0xe7, 0xed, 0x07, 0x2c, 0x28, 0x59, 0xad, + 0xcb, 0xfc, 0x26, 0xdb, 0x2d, 0xed, 0x1c, 0xb4, 0x49, 0x3e, 0xa9, 0x6a, 0xfb, 0xa1, 0xa9, 0x2a, + 0xbd, 0x8c, 0x22, 0xbd, 0x71, 0xaa, 0xf2, 0x27, 0x97, 0x92, 0xda, 0xf4, 0xaf, 0x46, 0xc9, 0x5b, + 0x7f, 0x49, 0x9e, 0xa6, 0x9b, 0xbd, 0x70, 0x7c, 0x61, 0x8e, 0xbe, 0xec, 0xb0, 0xbd, 0x53, 0x20, + 0x78, 0xd3, 0x0e, 0xfc, 0x6d, 0x37, 0xdf, 0xfc, 0x77, 0xc6, 0x6e, 0x6c, 0x13, 0xcf, 0x47, 0x62, + 0x73, 0x17, 0xc4, 0x3f, 0x3e, 0x70, 0x20, 0xfe, 0x17, 0xef, 0x26, 0x35, 0x5c, 0x7d, 0x3d, 0x79, + 0x3d, 0x78, 0xd9, 0x21, 0x81, 0x03, 0x3f, 0xf7, 0x9a, 0x70, 0x64, 0x4d, 0xbd, 0xe2, 0x33, 0x22, + 0xc7, 0xeb, 0x74, 0x80, 0xd3, 0x0c, 0x88, 0xeb, 0xc0, 0xa1, 0x28, 0x30, 0x04, 0x3d, 0xad, 0x91, + 0x57, 0xd6, 0x73, 0x82, 0xf0, 0x31, 0x70, 0x20, 0xee, 0x1b, 0xd3, 0xae, 0x89, 
0xf8, 0xfc, 0xf3, + 0xd7, 0xb7, 0x9d, 0x7b, 0xc3, 0x83, 0xb8, 0xa9, 0x8b, 0x87, 0x97, 0xab, 0x2d, 0xc1, 0x93, 0xae, + 0xa0, 0xa0, 0x49, 0xe3, 0xcd, 0xd3, 0xec, 0xfe, 0xb3, 0xe3, 0xf7, 0x8f, 0x95, 0xa6, 0x59, 0x33, + 0x15, 0x85, 0x9d, 0xcb, 0xa8, 0x6d, 0xbd, 0x92, 0x17, 0x8f, 0x43, 0xa1, 0x91, 0x6e, 0x3a, 0x52, + 0x56, 0x6e, 0xbe, 0x97, 0xe9, 0x6e, 0xbc, 0xe0, 0x47, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x28, + 0xb1, 0xe0, 0xaf, 0x4a, 0x03, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -208,74 +208,74 @@ var _ grpc.ClientConn // is compatible with the grpc package it is being compiled against. const _ = grpc.SupportPackageIsVersion4 -// DataProxyClient is the client API for DataProxy service. +// DataProxyServiceClient is the client API for DataProxyService service. // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type DataProxyClient interface { - // Retrieves user information about the currently logged in user. +type DataProxyServiceClient interface { + // CreateUploadLocation creates a signed url to upload artifacts to for a given project/domain. 
CreateUploadLocation(ctx context.Context, in *CreateUploadLocationRequest, opts ...grpc.CallOption) (*CreateUploadLocationResponse, error) } -type dataProxyClient struct { +type dataProxyServiceClient struct { cc *grpc.ClientConn } -func NewDataProxyClient(cc *grpc.ClientConn) DataProxyClient { - return &dataProxyClient{cc} +func NewDataProxyServiceClient(cc *grpc.ClientConn) DataProxyServiceClient { + return &dataProxyServiceClient{cc} } -func (c *dataProxyClient) CreateUploadLocation(ctx context.Context, in *CreateUploadLocationRequest, opts ...grpc.CallOption) (*CreateUploadLocationResponse, error) { +func (c *dataProxyServiceClient) CreateUploadLocation(ctx context.Context, in *CreateUploadLocationRequest, opts ...grpc.CallOption) (*CreateUploadLocationResponse, error) { out := new(CreateUploadLocationResponse) - err := c.cc.Invoke(ctx, "/flyteidl.service.DataProxy/CreateUploadLocation", in, out, opts...) + err := c.cc.Invoke(ctx, "/flyteidl.service.DataProxyService/CreateUploadLocation", in, out, opts...) if err != nil { return nil, err } return out, nil } -// DataProxyServer is the server API for DataProxy service. -type DataProxyServer interface { - // Retrieves user information about the currently logged in user. +// DataProxyServiceServer is the server API for DataProxyService service. +type DataProxyServiceServer interface { + // CreateUploadLocation creates a signed url to upload artifacts to for a given project/domain. CreateUploadLocation(context.Context, *CreateUploadLocationRequest) (*CreateUploadLocationResponse, error) } -// UnimplementedDataProxyServer can be embedded to have forward compatible implementations. -type UnimplementedDataProxyServer struct { +// UnimplementedDataProxyServiceServer can be embedded to have forward compatible implementations. 
+type UnimplementedDataProxyServiceServer struct { } -func (*UnimplementedDataProxyServer) CreateUploadLocation(ctx context.Context, req *CreateUploadLocationRequest) (*CreateUploadLocationResponse, error) { +func (*UnimplementedDataProxyServiceServer) CreateUploadLocation(ctx context.Context, req *CreateUploadLocationRequest) (*CreateUploadLocationResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method CreateUploadLocation not implemented") } -func RegisterDataProxyServer(s *grpc.Server, srv DataProxyServer) { - s.RegisterService(&_DataProxy_serviceDesc, srv) +func RegisterDataProxyServiceServer(s *grpc.Server, srv DataProxyServiceServer) { + s.RegisterService(&_DataProxyService_serviceDesc, srv) } -func _DataProxy_CreateUploadLocation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { +func _DataProxyService_CreateUploadLocation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(CreateUploadLocationRequest) if err := dec(in); err != nil { return nil, err } if interceptor == nil { - return srv.(DataProxyServer).CreateUploadLocation(ctx, in) + return srv.(DataProxyServiceServer).CreateUploadLocation(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, - FullMethod: "/flyteidl.service.DataProxy/CreateUploadLocation", + FullMethod: "/flyteidl.service.DataProxyService/CreateUploadLocation", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(DataProxyServer).CreateUploadLocation(ctx, req.(*CreateUploadLocationRequest)) + return srv.(DataProxyServiceServer).CreateUploadLocation(ctx, req.(*CreateUploadLocationRequest)) } return interceptor(ctx, in, info, handler) } -var _DataProxy_serviceDesc = grpc.ServiceDesc{ - ServiceName: "flyteidl.service.DataProxy", - HandlerType: (*DataProxyServer)(nil), +var 
_DataProxyService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "flyteidl.service.DataProxyService", + HandlerType: (*DataProxyServiceServer)(nil), Methods: []grpc.MethodDesc{ { MethodName: "CreateUploadLocation", - Handler: _DataProxy_CreateUploadLocation_Handler, + Handler: _DataProxyService_CreateUploadLocation_Handler, }, }, Streams: []grpc.StreamDesc{}, diff --git a/flyteidl/gen/pb-go/flyteidl/service/dataproxy.pb.gw.go b/flyteidl/gen/pb-go/flyteidl/service/dataproxy.pb.gw.go index 76695e390..600705b0c 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/dataproxy.pb.gw.go +++ b/flyteidl/gen/pb-go/flyteidl/service/dataproxy.pb.gw.go @@ -28,7 +28,7 @@ var _ status.Status var _ = runtime.String var _ = utilities.NewDoubleArray -func request_DataProxy_CreateUploadLocation_0(ctx context.Context, marshaler runtime.Marshaler, client DataProxyClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { +func request_DataProxyService_CreateUploadLocation_0(ctx context.Context, marshaler runtime.Marshaler, client DataProxyServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { var protoReq CreateUploadLocationRequest var metadata runtime.ServerMetadata @@ -45,9 +45,9 @@ func request_DataProxy_CreateUploadLocation_0(ctx context.Context, marshaler run } -// RegisterDataProxyHandlerFromEndpoint is same as RegisterDataProxyHandler but +// RegisterDataProxyServiceHandlerFromEndpoint is same as RegisterDataProxyServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. -func RegisterDataProxyHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { +func RegisterDataProxyServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { conn, err := grpc.Dial(endpoint, opts...) 
if err != nil { return err @@ -67,23 +67,23 @@ func RegisterDataProxyHandlerFromEndpoint(ctx context.Context, mux *runtime.Serv }() }() - return RegisterDataProxyHandler(ctx, mux, conn) + return RegisterDataProxyServiceHandler(ctx, mux, conn) } -// RegisterDataProxyHandler registers the http handlers for service DataProxy to "mux". +// RegisterDataProxyServiceHandler registers the http handlers for service DataProxyService to "mux". // The handlers forward requests to the grpc endpoint over "conn". -func RegisterDataProxyHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - return RegisterDataProxyHandlerClient(ctx, mux, NewDataProxyClient(conn)) +func RegisterDataProxyServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterDataProxyServiceHandlerClient(ctx, mux, NewDataProxyServiceClient(conn)) } -// RegisterDataProxyHandlerClient registers the http handlers for service DataProxy -// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "DataProxyClient". -// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "DataProxyClient" +// RegisterDataProxyServiceHandlerClient registers the http handlers for service DataProxyService +// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "DataProxyServiceClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "DataProxyServiceClient" // doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "DataProxyClient" to call the correct interceptors. -func RegisterDataProxyHandlerClient(ctx context.Context, mux *runtime.ServeMux, client DataProxyClient) error { +// "DataProxyServiceClient" to call the correct interceptors. 
+func RegisterDataProxyServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client DataProxyServiceClient) error { - mux.Handle("POST", pattern_DataProxy_CreateUploadLocation_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("POST", pattern_DataProxyService_CreateUploadLocation_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -92,14 +92,14 @@ func RegisterDataProxyHandlerClient(ctx context.Context, mux *runtime.ServeMux, runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_DataProxy_CreateUploadLocation_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_DataProxyService_CreateUploadLocation_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_DataProxy_CreateUploadLocation_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_DataProxyService_CreateUploadLocation_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) @@ -107,9 +107,9 @@ func RegisterDataProxyHandlerClient(ctx context.Context, mux *runtime.ServeMux, } var ( - pattern_DataProxy_CreateUploadLocation_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"api", "v1", "dataproxy", "artifact_urn"}, "")) + pattern_DataProxyService_CreateUploadLocation_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"api", "v1", "dataproxy", "artifact_urn"}, "")) ) var ( - forward_DataProxy_CreateUploadLocation_0 = runtime.ForwardResponseMessage + forward_DataProxyService_CreateUploadLocation_0 = runtime.ForwardResponseMessage ) diff --git a/flyteidl/gen/pb-go/flyteidl/service/dataproxy.swagger.json b/flyteidl/gen/pb-go/flyteidl/service/dataproxy.swagger.json index 2d1a3f5f0..6e5b2fdbf 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/dataproxy.swagger.json +++ b/flyteidl/gen/pb-go/flyteidl/service/dataproxy.swagger.json @@ -17,7 +17,7 @@ "paths": { "/api/v1/dataproxy/artifact_urn": { "post": { - "summary": "Retrieves user information about the currently logged in user.", + "summary": "CreateUploadLocation creates a signed url to upload artifacts to for a given project/domain.", "description": "Creates a write-only http location that is accessible for tasks at runtime.", "operationId": "CreateUploadLocation", "responses": { @@ -39,7 +39,7 @@ } ], "tags": [ - "DataProxy" + "DataProxyService" ] } } diff --git a/flyteidl/gen/pb-java/flyteidl/service/Dataproxy.java b/flyteidl/gen/pb-java/flyteidl/service/Dataproxy.java index 27745b2de..4d86783aa 100644 --- a/flyteidl/gen/pb-java/flyteidl/service/Dataproxy.java +++ b/flyteidl/gen/pb-java/flyteidl/service/Dataproxy.java @@ -2306,15 +2306,15 @@ public flyteidl.service.Dataproxy.CreateUploadLocationRequest getDefaultInstance "ateUploadLocationRequest\022\017\n\007project\030\001 \001(" + "\t\022\016\n\006domain\030\002 \001(\t\022\016\n\006suffix\030\003 \001(\t\022-\n\nexp" + "ires_in\030\004 
\001(\0132\031.google.protobuf.Duration" + - "2\376\001\n\tDataProxy\022\360\001\n\024CreateUploadLocation\022" + - "-.flyteidl.service.CreateUploadLocationR" + - "equest\032..flyteidl.service.CreateUploadLo" + - "cationResponse\"y\202\323\344\223\002#\"\036/api/v1/dataprox" + - "y/artifact_urn:\001*\222AM\032KCreates a write-on" + - "ly http location that is accessible for " + - "tasks at runtime.B9Z7github.com/flyteorg" + - "/flyteidl/gen/pb-go/flyteidl/serviceb\006pr" + - "oto3" + "2\205\002\n\020DataProxyService\022\360\001\n\024CreateUploadLo" + + "cation\022-.flyteidl.service.CreateUploadLo" + + "cationRequest\032..flyteidl.service.CreateU" + + "ploadLocationResponse\"y\202\323\344\223\002#\"\036/api/v1/d" + + "ataproxy/artifact_urn:\001*\222AM\032KCreates a w" + + "rite-only http location that is accessib" + + "le for tasks at runtime.B9Z7github.com/f" + + "lyteorg/flyteidl/gen/pb-go/flyteidl/serv" + + "iceb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { diff --git a/flyteidl/gen/pb-js/flyteidl.d.ts b/flyteidl/gen/pb-js/flyteidl.d.ts index ec1a885f7..95fd4c5f5 100644 --- a/flyteidl/gen/pb-js/flyteidl.d.ts +++ b/flyteidl/gen/pb-js/flyteidl.d.ts @@ -17092,11 +17092,11 @@ export namespace flyteidl { public static verify(message: { [k: string]: any }): (string|null); } - /** Represents a DataProxy */ - class DataProxy extends $protobuf.rpc.Service { + /** Represents a DataProxyService */ + class DataProxyService extends $protobuf.rpc.Service { /** - * Constructs a new DataProxy service. + * Constructs a new DataProxyService service. 
* @param rpcImpl RPC implementation * @param [requestDelimited=false] Whether requests are length-delimited * @param [responseDelimited=false] Whether responses are length-delimited @@ -17104,20 +17104,20 @@ export namespace flyteidl { constructor(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean); /** - * Creates new DataProxy service using the specified rpc implementation. + * Creates new DataProxyService service using the specified rpc implementation. * @param rpcImpl RPC implementation * @param [requestDelimited=false] Whether requests are length-delimited * @param [responseDelimited=false] Whether responses are length-delimited * @returns RPC service. Useful where requests and/or responses are streamed. */ - public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): DataProxy; + public static create(rpcImpl: $protobuf.RPCImpl, requestDelimited?: boolean, responseDelimited?: boolean): DataProxyService; /** * Calls CreateUploadLocation. * @param request CreateUploadLocationRequest message or plain object * @param callback Node-style callback called with the error, if any, and CreateUploadLocationResponse */ - public createUploadLocation(request: flyteidl.service.ICreateUploadLocationRequest, callback: flyteidl.service.DataProxy.CreateUploadLocationCallback): void; + public createUploadLocation(request: flyteidl.service.ICreateUploadLocationRequest, callback: flyteidl.service.DataProxyService.CreateUploadLocationCallback): void; /** * Calls CreateUploadLocation. @@ -17127,10 +17127,10 @@ export namespace flyteidl { public createUploadLocation(request: flyteidl.service.ICreateUploadLocationRequest): Promise; } - namespace DataProxy { + namespace DataProxyService { /** - * Callback as used by {@link flyteidl.service.DataProxy#createUploadLocation}. + * Callback as used by {@link flyteidl.service.DataProxyService#createUploadLocation}. 
* @param error Error, if any * @param [response] CreateUploadLocationResponse */ diff --git a/flyteidl/gen/pb-js/flyteidl.js b/flyteidl/gen/pb-js/flyteidl.js index 56af2509d..ab2d1bdda 100644 --- a/flyteidl/gen/pb-js/flyteidl.js +++ b/flyteidl/gen/pb-js/flyteidl.js @@ -39820,41 +39820,41 @@ export const flyteidl = $root.flyteidl = (() => { return CreateUploadLocationRequest; })(); - service.DataProxy = (function() { + service.DataProxyService = (function() { /** - * Constructs a new DataProxy service. + * Constructs a new DataProxyService service. * @memberof flyteidl.service - * @classdesc Represents a DataProxy + * @classdesc Represents a DataProxyService * @extends $protobuf.rpc.Service * @constructor * @param {$protobuf.RPCImpl} rpcImpl RPC implementation * @param {boolean} [requestDelimited=false] Whether requests are length-delimited * @param {boolean} [responseDelimited=false] Whether responses are length-delimited */ - function DataProxy(rpcImpl, requestDelimited, responseDelimited) { + function DataProxyService(rpcImpl, requestDelimited, responseDelimited) { $protobuf.rpc.Service.call(this, rpcImpl, requestDelimited, responseDelimited); } - (DataProxy.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = DataProxy; + (DataProxyService.prototype = Object.create($protobuf.rpc.Service.prototype)).constructor = DataProxyService; /** - * Creates new DataProxy service using the specified rpc implementation. + * Creates new DataProxyService service using the specified rpc implementation. * @function create - * @memberof flyteidl.service.DataProxy + * @memberof flyteidl.service.DataProxyService * @static * @param {$protobuf.RPCImpl} rpcImpl RPC implementation * @param {boolean} [requestDelimited=false] Whether requests are length-delimited * @param {boolean} [responseDelimited=false] Whether responses are length-delimited - * @returns {DataProxy} RPC service. Useful where requests and/or responses are streamed. 
+ * @returns {DataProxyService} RPC service. Useful where requests and/or responses are streamed. */ - DataProxy.create = function create(rpcImpl, requestDelimited, responseDelimited) { + DataProxyService.create = function create(rpcImpl, requestDelimited, responseDelimited) { return new this(rpcImpl, requestDelimited, responseDelimited); }; /** - * Callback as used by {@link flyteidl.service.DataProxy#createUploadLocation}. - * @memberof flyteidl.service.DataProxy + * Callback as used by {@link flyteidl.service.DataProxyService#createUploadLocation}. + * @memberof flyteidl.service.DataProxyService * @typedef CreateUploadLocationCallback * @type {function} * @param {Error|null} error Error, if any @@ -39864,28 +39864,28 @@ export const flyteidl = $root.flyteidl = (() => { /** * Calls CreateUploadLocation. * @function createUploadLocation - * @memberof flyteidl.service.DataProxy + * @memberof flyteidl.service.DataProxyService * @instance * @param {flyteidl.service.ICreateUploadLocationRequest} request CreateUploadLocationRequest message or plain object - * @param {flyteidl.service.DataProxy.CreateUploadLocationCallback} callback Node-style callback called with the error, if any, and CreateUploadLocationResponse + * @param {flyteidl.service.DataProxyService.CreateUploadLocationCallback} callback Node-style callback called with the error, if any, and CreateUploadLocationResponse * @returns {undefined} * @variation 1 */ - Object.defineProperty(DataProxy.prototype.createUploadLocation = function createUploadLocation(request, callback) { + Object.defineProperty(DataProxyService.prototype.createUploadLocation = function createUploadLocation(request, callback) { return this.rpcCall(createUploadLocation, $root.flyteidl.service.CreateUploadLocationRequest, $root.flyteidl.service.CreateUploadLocationResponse, request, callback); }, "name", { value: "CreateUploadLocation" }); /** * Calls CreateUploadLocation. 
* @function createUploadLocation - * @memberof flyteidl.service.DataProxy + * @memberof flyteidl.service.DataProxyService * @instance * @param {flyteidl.service.ICreateUploadLocationRequest} request CreateUploadLocationRequest message or plain object * @returns {Promise} Promise * @variation 2 */ - return DataProxy; + return DataProxyService; })(); service.UserInfoRequest = (function() { diff --git a/flyteidl/gen/pb_python/flyteidl/service/dataproxy_pb2.py b/flyteidl/gen/pb_python/flyteidl/service/dataproxy_pb2.py index 1fc7790f0..ee8bb435a 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/dataproxy_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/service/dataproxy_pb2.py @@ -24,7 +24,7 @@ package='flyteidl.service', syntax='proto3', serialized_options=_b('Z7github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/service'), - serialized_pb=_b('\n flyteidl/service/dataproxy.proto\x12\x10\x66lyteidl.service\x1a\x1cgoogle/api/annotations.proto\x1a,protoc-gen-swagger/options/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"v\n\x1c\x43reateUploadLocationResponse\x12\x12\n\nsigned_url\x18\x01 \x01(\t\x12\x12\n\nnative_url\x18\x02 \x01(\t\x12.\n\nexpires_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"}\n\x1b\x43reateUploadLocationRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x0e\n\x06\x64omain\x18\x02 \x01(\t\x12\x0e\n\x06suffix\x18\x03 \x01(\t\x12-\n\nexpires_in\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration2\xfe\x01\n\tDataProxy\x12\xf0\x01\n\x14\x43reateUploadLocation\x12-.flyteidl.service.CreateUploadLocationRequest\x1a..flyteidl.service.CreateUploadLocationResponse\"y\x82\xd3\xe4\x93\x02#\"\x1e/api/v1/dataproxy/artifact_urn:\x01*\x92\x41M\x1aKCreates a write-only http location that is accessible for tasks at runtime.B9Z7github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/serviceb\x06proto3') + serialized_pb=_b('\n 
flyteidl/service/dataproxy.proto\x12\x10\x66lyteidl.service\x1a\x1cgoogle/api/annotations.proto\x1a,protoc-gen-swagger/options/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"v\n\x1c\x43reateUploadLocationResponse\x12\x12\n\nsigned_url\x18\x01 \x01(\t\x12\x12\n\nnative_url\x18\x02 \x01(\t\x12.\n\nexpires_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"}\n\x1b\x43reateUploadLocationRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x0e\n\x06\x64omain\x18\x02 \x01(\t\x12\x0e\n\x06suffix\x18\x03 \x01(\t\x12-\n\nexpires_in\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration2\x85\x02\n\x10\x44\x61taProxyService\x12\xf0\x01\n\x14\x43reateUploadLocation\x12-.flyteidl.service.CreateUploadLocationRequest\x1a..flyteidl.service.CreateUploadLocationResponse\"y\x82\xd3\xe4\x93\x02#\"\x1e/api/v1/dataproxy/artifact_urn:\x01*\x92\x41M\x1aKCreates a write-only http location that is accessible for tasks at runtime.B9Z7github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/serviceb\x06proto3') , dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,protoc__gen__swagger_dot_options_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) @@ -150,18 +150,18 @@ DESCRIPTOR._options = None -_DATAPROXY = _descriptor.ServiceDescriptor( - name='DataProxy', - full_name='flyteidl.service.DataProxy', +_DATAPROXYSERVICE = _descriptor.ServiceDescriptor( + name='DataProxyService', + full_name='flyteidl.service.DataProxyService', file=DESCRIPTOR, index=0, serialized_options=None, serialized_start=443, - serialized_end=697, + serialized_end=704, methods=[ _descriptor.MethodDescriptor( name='CreateUploadLocation', - full_name='flyteidl.service.DataProxy.CreateUploadLocation', + full_name='flyteidl.service.DataProxyService.CreateUploadLocation', index=0, containing_service=None, input_type=_CREATEUPLOADLOCATIONREQUEST, @@ -169,8 +169,8 @@ 
serialized_options=_b('\202\323\344\223\002#\"\036/api/v1/dataproxy/artifact_urn:\001*\222AM\032KCreates a write-only http location that is accessible for tasks at runtime.'), ), ]) -_sym_db.RegisterServiceDescriptor(_DATAPROXY) +_sym_db.RegisterServiceDescriptor(_DATAPROXYSERVICE) -DESCRIPTOR.services_by_name['DataProxy'] = _DATAPROXY +DESCRIPTOR.services_by_name['DataProxyService'] = _DATAPROXYSERVICE # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/service/dataproxy_pb2_grpc.py b/flyteidl/gen/pb_python/flyteidl/service/dataproxy_pb2_grpc.py index 0294a071f..591abab62 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/dataproxy_pb2_grpc.py +++ b/flyteidl/gen/pb_python/flyteidl/service/dataproxy_pb2_grpc.py @@ -4,8 +4,8 @@ from flyteidl.service import dataproxy_pb2 as flyteidl_dot_service_dot_dataproxy__pb2 -class DataProxyStub(object): - """DataProxy defines an RPC Service that allows access to user-data in a controlled manner. +class DataProxyServiceStub(object): + """DataProxyService defines an RPC Service that allows access to user-data in a controlled manner. """ def __init__(self, channel): @@ -15,25 +15,25 @@ def __init__(self, channel): channel: A grpc.Channel. """ self.CreateUploadLocation = channel.unary_unary( - '/flyteidl.service.DataProxy/CreateUploadLocation', + '/flyteidl.service.DataProxyService/CreateUploadLocation', request_serializer=flyteidl_dot_service_dot_dataproxy__pb2.CreateUploadLocationRequest.SerializeToString, response_deserializer=flyteidl_dot_service_dot_dataproxy__pb2.CreateUploadLocationResponse.FromString, ) -class DataProxyServicer(object): - """DataProxy defines an RPC Service that allows access to user-data in a controlled manner. +class DataProxyServiceServicer(object): + """DataProxyService defines an RPC Service that allows access to user-data in a controlled manner. """ def CreateUploadLocation(self, request, context): - """Retrieves user information about the currently logged in user. 
+ """CreateUploadLocation creates a signed url to upload artifacts to for a given project/domain. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') -def add_DataProxyServicer_to_server(servicer, server): +def add_DataProxyServiceServicer_to_server(servicer, server): rpc_method_handlers = { 'CreateUploadLocation': grpc.unary_unary_rpc_method_handler( servicer.CreateUploadLocation, @@ -42,5 +42,5 @@ def add_DataProxyServicer_to_server(servicer, server): ), } generic_handler = grpc.method_handlers_generic_handler( - 'flyteidl.service.DataProxy', rpc_method_handlers) + 'flyteidl.service.DataProxyService', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) diff --git a/flyteidl/protos/docs/admin/admin.rst b/flyteidl/protos/docs/admin/admin.rst deleted file mode 100644 index a6200604a..000000000 --- a/flyteidl/protos/docs/admin/admin.rst +++ /dev/null @@ -1,3742 +0,0 @@ -###################### -Protocol Documentation -###################### - - - - -.. _ref_flyteidl/admin/cluster_assignment.proto: - -flyteidl/admin/cluster_assignment.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.Affinity: - -Affinity ------------------------------------------------------------------- - -Defines a set of constraints used to select eligible objects based on labels they possess. - - - -.. csv-table:: Affinity type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "selectors", ":ref:`ref_flyteidl.admin.Selector`", "repeated", "Multiples selectors are 'and'-ed together to produce the list of matching, eligible objects." - - - - - - - -.. _ref_flyteidl.admin.ClusterAssignment: - -ClusterAssignment ------------------------------------------------------------------- - -Encapsulates specifications for routing an execution onto a specific cluster. - - - -.. 
csv-table:: ClusterAssignment type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "affinity", ":ref:`ref_flyteidl.admin.Affinity`", "", "" - - - - - - - -.. _ref_flyteidl.admin.Selector: - -Selector ------------------------------------------------------------------- - -A Selector is a specification for identifying a set of objects with corresponding labels. - - - -.. csv-table:: Selector type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "The label key." - "value", ":ref:`ref_string`", "repeated", "One or more values used to match labels. For equality (or inequality) requirements, values must contain a single element. For set-based requirements, values may contain one or more elements." - "operator", ":ref:`ref_flyteidl.admin.Selector.Operator`", "", "" - - - - - - - - - -.. _ref_flyteidl.admin.Selector.Operator: - -Selector.Operator ------------------------------------------------------------------- - -Defines how a label with a corresponding key and value is selected or excluded. - -.. csv-table:: Enum Selector.Operator values - :header: "Name", "Number", "Description" - :widths: auto - - "EQUALS", "0", "" - "NOT_EQUALS", "1", "" - "IN", "2", "" - "NOT_IN", "3", "" - "EXISTS", "4", "A label key with any value" - - - - - - - - - - -.. _ref_flyteidl/admin/common.proto: - -flyteidl/admin/common.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.Annotations: - -Annotations ------------------------------------------------------------------- - -Annotation values to be applied to an execution resource. -In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined -to specify how to merge annotations defined at registration and execution time. - - - -.. 
csv-table:: Annotations type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_flyteidl.admin.Annotations.ValuesEntry`", "repeated", "Map of custom annotations to be applied to the execution resource." - - - - - - - -.. _ref_flyteidl.admin.Annotations.ValuesEntry: - -Annotations.ValuesEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: Annotations.ValuesEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.admin.AuthRole: - -AuthRole ------------------------------------------------------------------- - -Defines permissions associated with executions created by this launch plan spec. -Use either of these roles when they have permissions required by your workflow execution. -Deprecated. - - - -.. csv-table:: AuthRole type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "assumable_iam_role", ":ref:`ref_string`", "", "Defines an optional iam role which will be used for tasks run in executions created with this launch plan." - "kubernetes_service_account", ":ref:`ref_string`", "", "Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan." - - - - - - - -.. _ref_flyteidl.admin.EmailNotification: - -EmailNotification ------------------------------------------------------------------- - -Defines an email notification specification. - - - -.. csv-table:: EmailNotification type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "recipients_email", ":ref:`ref_string`", "repeated", "The list of email addresses recipients for this notification. +required" - - - - - - - -.. 
_ref_flyteidl.admin.Labels: - -Labels ------------------------------------------------------------------- - -Label values to be applied to an execution resource. -In the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined -to specify how to merge labels defined at registration and execution time. - - - -.. csv-table:: Labels type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_flyteidl.admin.Labels.ValuesEntry`", "repeated", "Map of custom labels to be applied to the execution resource." - - - - - - - -.. _ref_flyteidl.admin.Labels.ValuesEntry: - -Labels.ValuesEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: Labels.ValuesEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.admin.NamedEntity: - -NamedEntity ------------------------------------------------------------------- - -Encapsulates information common to a NamedEntity, a Flyte resource such as a task, -workflow or launch plan. A NamedEntity is exclusively identified by its resource type -and identifier. - - - -.. csv-table:: NamedEntity type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the named entity. One of Task, Workflow or LaunchPlan." - "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "" - "metadata", ":ref:`ref_flyteidl.admin.NamedEntityMetadata`", "", "Additional metadata around a named entity." - - - - - - - -.. _ref_flyteidl.admin.NamedEntityGetRequest: - -NamedEntityGetRequest ------------------------------------------------------------------- - -A request to retrieve the metadata associated with a NamedEntityIdentifier - - - -.. 
csv-table:: NamedEntityGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. +required" - "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "The identifier for the named entity for which to fetch metadata. +required" - - - - - - - -.. _ref_flyteidl.admin.NamedEntityIdentifier: - -NamedEntityIdentifier ------------------------------------------------------------------- - -Encapsulation of fields that identifies a Flyte resource. -A Flyte resource can be a task, workflow or launch plan. -A resource can internally have multiple versions and is uniquely identified -by project, domain, and name. - - - -.. csv-table:: NamedEntityIdentifier type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Name of the project the resource belongs to." - "domain", ":ref:`ref_string`", "", "Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project." - "name", ":ref:`ref_string`", "", "User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'" - - - - - - - -.. _ref_flyteidl.admin.NamedEntityIdentifierList: - -NamedEntityIdentifierList ------------------------------------------------------------------- - -Represents a list of NamedEntityIdentifiers. - - - -.. csv-table:: NamedEntityIdentifierList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "entities", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "repeated", "A list of identifiers." - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. 
If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.NamedEntityIdentifierListRequest: - -NamedEntityIdentifierListRequest ------------------------------------------------------------------- - -Represents a request structure to list NamedEntityIdentifiers. - - - -.. csv-table:: NamedEntityIdentifierListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Name of the project that contains the identifiers. +required" - "domain", ":ref:`ref_string`", "", "Name of the domain the identifiers belongs to within the project. +required" - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Specifies how listed entities should be sorted in the response. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. +optional" - - - - - - - -.. _ref_flyteidl.admin.NamedEntityList: - -NamedEntityList ------------------------------------------------------------------- - -Represents a list of NamedEntityIdentifiers. - - - -.. csv-table:: NamedEntityList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "entities", ":ref:`ref_flyteidl.admin.NamedEntity`", "repeated", "A list of NamedEntity objects" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.NamedEntityListRequest: - -NamedEntityListRequest ------------------------------------------------------------------- - -Represents a request structure to list NamedEntity objects - - - -.. 
csv-table:: NamedEntityListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. +required" - "project", ":ref:`ref_string`", "", "Name of the project that contains the identifiers. +required" - "domain", ":ref:`ref_string`", "", "Name of the domain the identifiers belongs to within the project." - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned." - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Specifies how listed entities should be sorted in the response. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. +optional" - - - - - - - -.. _ref_flyteidl.admin.NamedEntityMetadata: - -NamedEntityMetadata ------------------------------------------------------------------- - -Additional metadata around a named entity. - - - -.. csv-table:: NamedEntityMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "description", ":ref:`ref_string`", "", "Common description across all versions of the entity +optional" - "state", ":ref:`ref_flyteidl.admin.NamedEntityState`", "", "Shared state across all version of the entity At this point in time, only workflow entities can have their state archived." - - - - - - - -.. _ref_flyteidl.admin.NamedEntityUpdateRequest: - -NamedEntityUpdateRequest ------------------------------------------------------------------- - -Request to set the referenced named entity state to the configured value. - - - -.. 
csv-table:: NamedEntityUpdateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Resource type of the metadata to update +required" - "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "Identifier of the metadata to update +required" - "metadata", ":ref:`ref_flyteidl.admin.NamedEntityMetadata`", "", "Metadata object to set as the new value +required" - - - - - - - -.. _ref_flyteidl.admin.NamedEntityUpdateResponse: - -NamedEntityUpdateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.Notification: - -Notification ------------------------------------------------------------------- - -Represents a structure for notifications based on execution status. -The notification content is configured within flyte admin but can be templatized. -Future iterations could expose configuring notifications with custom content. - - - -.. csv-table:: Notification type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "phases", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "repeated", "A list of phases to which users can associate the notifications to. +required" - "email", ":ref:`ref_flyteidl.admin.EmailNotification`", "", "" - "pager_duty", ":ref:`ref_flyteidl.admin.PagerDutyNotification`", "", "" - "slack", ":ref:`ref_flyteidl.admin.SlackNotification`", "", "" - - - - - - - -.. _ref_flyteidl.admin.ObjectGetRequest: - -ObjectGetRequest ------------------------------------------------------------------- - -Shared request structure to fetch a single resource. -Resources include: Task, Workflow, LaunchPlan - - - -.. csv-table:: ObjectGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Indicates a unique version of resource. 
+required" - - - - - - - -.. _ref_flyteidl.admin.PagerDutyNotification: - -PagerDutyNotification ------------------------------------------------------------------- - -Defines a pager duty notification specification. - - - -.. csv-table:: PagerDutyNotification type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "recipients_email", ":ref:`ref_string`", "repeated", "Currently, PagerDuty notifications leverage email to trigger a notification. +required" - - - - - - - -.. _ref_flyteidl.admin.RawOutputDataConfig: - -RawOutputDataConfig ------------------------------------------------------------------- - -Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.). -See https://github.com/flyteorg/flyte/issues/211 for more background information. - - - -.. csv-table:: RawOutputDataConfig type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "output_location_prefix", ":ref:`ref_string`", "", "Prefix for where offloaded data from user workflows will be written e.g. s3://bucket/key or s3://bucket/" - - - - - - - -.. _ref_flyteidl.admin.ResourceListRequest: - -ResourceListRequest ------------------------------------------------------------------- - -Shared request structure to retrieve a list of resources. -Resources include: Task, Workflow, LaunchPlan - - - -.. csv-table:: ResourceListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "id represents the unique identifier of the resource. +required" - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. 
More info on constructing filters : +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" - - - - - - - -.. _ref_flyteidl.admin.SlackNotification: - -SlackNotification ------------------------------------------------------------------- - -Defines a slack notification specification. - - - -.. csv-table:: SlackNotification type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "recipients_email", ":ref:`ref_string`", "repeated", "Currently, Slack notifications leverage email to trigger a notification. +required" - - - - - - - -.. _ref_flyteidl.admin.Sort: - -Sort ------------------------------------------------------------------- - -Specifies sort ordering in a list request. - - - -.. csv-table:: Sort type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "Indicates an attribute to sort the response values. +required" - "direction", ":ref:`ref_flyteidl.admin.Sort.Direction`", "", "Indicates the direction to apply sort key for response values. +optional" - - - - - - - -.. _ref_flyteidl.admin.UrlBlob: - -UrlBlob ------------------------------------------------------------------- - -Represents a string url and associated metadata used throughout the platform. - - - -.. csv-table:: UrlBlob type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "url", ":ref:`ref_string`", "", "Actual url value." - "bytes", ":ref:`ref_int64`", "", "Represents the size of the file accessible at the above url." - - - - - - - - - -.. _ref_flyteidl.admin.NamedEntityState: - -NamedEntityState ------------------------------------------------------------------- - -The status of the named entity is used to control its visibility in the UI. - -.. 
csv-table:: Enum NamedEntityState values - :header: "Name", "Number", "Description" - :widths: auto - - "NAMED_ENTITY_ACTIVE", "0", "By default, all named entities are considered active and under development." - "NAMED_ENTITY_ARCHIVED", "1", "Archived named entities are no longer visible in the UI." - "SYSTEM_GENERATED", "2", "System generated entities that aren't explicitly created or managed by a user." - - - -.. _ref_flyteidl.admin.Sort.Direction: - -Sort.Direction ------------------------------------------------------------------- - - - -.. csv-table:: Enum Sort.Direction values - :header: "Name", "Number", "Description" - :widths: auto - - "DESCENDING", "0", "By default, fields are sorted in descending order." - "ASCENDING", "1", "" - - - - - - - - - - -.. _ref_flyteidl/admin/event.proto: - -flyteidl/admin/event.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.EventErrorAlreadyInTerminalState: - -EventErrorAlreadyInTerminalState ------------------------------------------------------------------- - -Indicates that a sent event was not used to update execution state due to -the referenced execution already being terminated (and therefore ineligible -for further state transitions). - - - -.. csv-table:: EventErrorAlreadyInTerminalState type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "current_phase", ":ref:`ref_string`", "", "+required" - - - - - - - -.. _ref_flyteidl.admin.EventErrorIncompatibleCluster: - -EventErrorIncompatibleCluster ------------------------------------------------------------------- - -Indicates an event was rejected because it came from a different cluster than -is on record as running the execution. - - - -.. csv-table:: EventErrorIncompatibleCluster type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cluster", ":ref:`ref_string`", "", "The cluster which has been recorded as processing the execution. 
+required" - - - - - - - -.. _ref_flyteidl.admin.EventFailureReason: - -EventFailureReason ------------------------------------------------------------------- - -Indicates why a sent event was not used to update execution. - - - -.. csv-table:: EventFailureReason type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "already_in_terminal_state", ":ref:`ref_flyteidl.admin.EventErrorAlreadyInTerminalState`", "", "" - "incompatible_cluster", ":ref:`ref_flyteidl.admin.EventErrorIncompatibleCluster`", "", "" - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionEventRequest: - -NodeExecutionEventRequest ------------------------------------------------------------------- - -Request to send a notification that a node execution event has occurred. - - - -.. csv-table:: NodeExecutionEventRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "request_id", ":ref:`ref_string`", "", "Unique ID for this request that can be traced between services" - "event", ":ref:`ref_flyteidl.event.NodeExecutionEvent`", "", "Details about the event that occurred." - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionEventResponse: - -NodeExecutionEventResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.TaskExecutionEventRequest: - -TaskExecutionEventRequest ------------------------------------------------------------------- - -Request to send a notification that a task execution event has occurred. - - - -.. csv-table:: TaskExecutionEventRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "request_id", ":ref:`ref_string`", "", "Unique ID for this request that can be traced between services" - "event", ":ref:`ref_flyteidl.event.TaskExecutionEvent`", "", "Details about the event that occurred." - - - - - - - -.. 
_ref_flyteidl.admin.TaskExecutionEventResponse: - -TaskExecutionEventResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.WorkflowExecutionEventRequest: - -WorkflowExecutionEventRequest ------------------------------------------------------------------- - -Request to send a notification that a workflow execution event has occurred. - - - -.. csv-table:: WorkflowExecutionEventRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "request_id", ":ref:`ref_string`", "", "Unique ID for this request that can be traced between services" - "event", ":ref:`ref_flyteidl.event.WorkflowExecutionEvent`", "", "Details about the event that occurred." - - - - - - - -.. _ref_flyteidl.admin.WorkflowExecutionEventResponse: - -WorkflowExecutionEventResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - - - - - - - - - - -.. _ref_flyteidl/admin/execution.proto: - -flyteidl/admin/execution.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.AbortMetadata: - -AbortMetadata ------------------------------------------------------------------- - -Specifies metadata around an aborted workflow execution. - - - -.. csv-table:: AbortMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cause", ":ref:`ref_string`", "", "In the case of a user-specified abort, this will pass along the user-supplied cause." - "principal", ":ref:`ref_string`", "", "Identifies the entity (if any) responsible for terminating the execution" - - - - - - - -.. 
_ref_flyteidl.admin.Execution: - -Execution ------------------------------------------------------------------- - -A workflow execution represents an instantiated workflow, including all inputs and additional -metadata as well as computed results included state, outputs, and duration-based attributes. -Used as a response object used in Get and List execution requests. - - - -.. csv-table:: Execution type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Unique identifier of the workflow execution." - "spec", ":ref:`ref_flyteidl.admin.ExecutionSpec`", "", "User-provided configuration and inputs for launching the execution." - "closure", ":ref:`ref_flyteidl.admin.ExecutionClosure`", "", "Execution results." - - - - - - - -.. _ref_flyteidl.admin.ExecutionClosure: - -ExecutionClosure ------------------------------------------------------------------- - -Encapsulates the results of the Execution - - - -.. csv-table:: ExecutionClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "outputs", ":ref:`ref_flyteidl.admin.LiteralMapBlob`", "", "**Deprecated.** Output URI in the case of a successful execution. DEPRECATED. Use GetExecutionData to fetch output data instead." - "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information in the case of a failed execution." - "abort_cause", ":ref:`ref_string`", "", "**Deprecated.** In the case of a user-specified abort, this will pass along the user-supplied cause." - "abort_metadata", ":ref:`ref_flyteidl.admin.AbortMetadata`", "", "In the case of a user-specified abort, this will pass along the user and their supplied cause." - "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Raw output data produced by this execution. DEPRECATED. Use GetExecutionData to fetch output data instead." 
- "computed_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Inputs computed and passed for execution. computed_inputs depends on inputs in ExecutionSpec, fixed and default inputs in launch plan" - "phase", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "", "Most recent recorded phase for the execution." - "started_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Reported time at which the execution began running." - "duration", ":ref:`ref_google.protobuf.Duration`", "", "The amount of time the execution spent running." - "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Reported time at which the execution was created." - "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Reported time at which the execution was last updated." - "notifications", ":ref:`ref_flyteidl.admin.Notification`", "repeated", "The notification settings to use after merging the CreateExecutionRequest and the launch plan notification settings. An execution launched with notifications will always prefer that definition to notifications defined statically in a launch plan." - "workflow_id", ":ref:`ref_flyteidl.core.Identifier`", "", "Identifies the workflow definition for this execution." - "state_change_details", ":ref:`ref_flyteidl.admin.ExecutionStateChangeDetails`", "", "Provides the details of the last stage change" - - - - - - - -.. _ref_flyteidl.admin.ExecutionCreateRequest: - -ExecutionCreateRequest ------------------------------------------------------------------- - -Request to launch an execution with the given project, domain and optionally-assigned name. - - - -.. csv-table:: ExecutionCreateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Name of the project the execution belongs to. +required" - "domain", ":ref:`ref_string`", "", "Name of the domain the execution belongs to. A domain can be considered as a subset within a specific project. 
+required" - "name", ":ref:`ref_string`", "", "User provided value for the resource. If none is provided the system will generate a unique string. +optional" - "spec", ":ref:`ref_flyteidl.admin.ExecutionSpec`", "", "Additional fields necessary to launch the execution. +optional" - "inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "The inputs required to start the execution. All required inputs must be included in this map. If not required and not provided, defaults apply. +optional" - - - - - - - -.. _ref_flyteidl.admin.ExecutionCreateResponse: - -ExecutionCreateResponse ------------------------------------------------------------------- - -The unique identifier for a successfully created execution. -If the name was *not* specified in the create request, this identifier will include a generated name. - - - -.. csv-table:: ExecutionCreateResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "" - - - - - - - -.. _ref_flyteidl.admin.ExecutionList: - -ExecutionList ------------------------------------------------------------------- - -Used as a response for request to list executions. -See :ref:`ref_flyteidl.admin.Execution` for more details - - - -.. csv-table:: ExecutionList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "executions", ":ref:`ref_flyteidl.admin.Execution`", "repeated", "" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.ExecutionMetadata: - -ExecutionMetadata ------------------------------------------------------------------- - -Represents attributes about an execution which are not required to launch the execution but are useful to record. -These attributes are assigned at launch time and do not change. - - - -.. 
csv-table:: ExecutionMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "mode", ":ref:`ref_flyteidl.admin.ExecutionMetadata.ExecutionMode`", "", "" - "principal", ":ref:`ref_string`", "", "Identifier of the entity that triggered this execution. For systems using back-end authentication any value set here will be discarded in favor of the authenticated user context." - "nesting", ":ref:`ref_uint32`", "", "Indicates the nestedness of this execution. If a user launches a workflow execution, the default nesting is 0. If this execution further launches a workflow (child workflow), the nesting level is incremented by 0 => 1 Generally, if workflow at nesting level k launches a workflow then the child workflow will have nesting = k + 1." - "scheduled_at", ":ref:`ref_google.protobuf.Timestamp`", "", "For scheduled executions, the requested time for execution for this specific schedule invocation." - "parent_node_execution", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Which subworkflow node (if any) launched this execution" - "reference_execution", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Optional, a reference workflow execution related to this execution. In the case of a relaunch, this references the original workflow execution." - "system_metadata", ":ref:`ref_flyteidl.admin.SystemMetadata`", "", "Optional, platform-specific metadata about the execution. In the future this may be gated behind an ACL or some sort of authorization." - - - - - - - -.. _ref_flyteidl.admin.ExecutionRecoverRequest: - -ExecutionRecoverRequest ------------------------------------------------------------------- - -Request to recover the referenced execution. - - - -.. csv-table:: ExecutionRecoverRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifier of the workflow execution to recover." 
- "name", ":ref:`ref_string`", "", "User provided value for the recovered execution. If none is provided the system will generate a unique string. +optional" - "metadata", ":ref:`ref_flyteidl.admin.ExecutionMetadata`", "", "Additional metadata which will be used to overwrite any metadata in the reference execution when triggering a recovery execution." - - - - - - - -.. _ref_flyteidl.admin.ExecutionRelaunchRequest: - -ExecutionRelaunchRequest ------------------------------------------------------------------- - -Request to relaunch the referenced execution. - - - -.. csv-table:: ExecutionRelaunchRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifier of the workflow execution to relaunch. +required" - "name", ":ref:`ref_string`", "", "User provided value for the relaunched execution. If none is provided the system will generate a unique string. +optional" - - - - - - - -.. _ref_flyteidl.admin.ExecutionSpec: - -ExecutionSpec ------------------------------------------------------------------- - -An ExecutionSpec encompasses all data used to launch this execution. The Spec does not change over the lifetime -of an execution as it progresses across phase changes. - - - -.. csv-table:: ExecutionSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "launch_plan", ":ref:`ref_flyteidl.core.Identifier`", "", "Launch plan to be executed" - "inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Input values to be passed for the execution" - "metadata", ":ref:`ref_flyteidl.admin.ExecutionMetadata`", "", "Metadata for the execution" - "notifications", ":ref:`ref_flyteidl.admin.NotificationList`", "", "List of notifications based on Execution status transitions When this list is not empty it is used rather than any notifications defined in the referenced launch plan. 
When this list is empty, the notifications defined for the launch plan will be applied." - "disable_all", ":ref:`ref_bool`", "", "This should be set to true if all notifications are intended to be disabled for this execution." - "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Labels to apply to the execution resource." - "annotations", ":ref:`ref_flyteidl.admin.Annotations`", "", "Annotations to apply to the execution resource." - "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "Optional: security context override to apply this execution." - "auth_role", ":ref:`ref_flyteidl.admin.AuthRole`", "", "**Deprecated.** Optional: auth override to apply this execution." - "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "Indicates the runtime priority of the execution." - "max_parallelism", ":ref:`ref_int32`", "", "Controls the maximum number of task nodes that can be run in parallel for the entire workflow. This is useful to achieve fairness. Note: MapTasks are regarded as one unit, and parallelism/concurrency of MapTasks is independent from this." - "raw_output_data_config", ":ref:`ref_flyteidl.admin.RawOutputDataConfig`", "", "User setting to configure where to store offloaded data (i.e. Blobs, structured datasets, query data, etc.). This should be a prefix like s3://my-bucket/my-data" - "cluster_assignment", ":ref:`ref_flyteidl.admin.ClusterAssignment`", "", "Controls how to select an available cluster on which this execution should run." - - - - - - - -.. _ref_flyteidl.admin.ExecutionStateChangeDetails: - -ExecutionStateChangeDetails ------------------------------------------------------------------- - - - - - -.. csv-table:: ExecutionStateChangeDetails type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "state", ":ref:`ref_flyteidl.admin.ExecutionState`", "", "The state of the execution is used to control its visibility in the UI/CLI." 
- "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the state changed." - "principal", ":ref:`ref_string`", "", "Identifies the entity (if any) responsible for causing the state change of the execution" - - - - - - - -.. _ref_flyteidl.admin.ExecutionTerminateRequest: - -ExecutionTerminateRequest ------------------------------------------------------------------- - -Request to terminate an in-progress execution. This action is irreversible. -If an execution is already terminated, this request will simply be a no-op. -This request will fail if it references a non-existent execution. -If the request succeeds the phase "ABORTED" will be recorded for the termination -with the optional cause added to the output_result. - - - -.. csv-table:: ExecutionTerminateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Uniquely identifies the individual workflow execution to be terminated." - "cause", ":ref:`ref_string`", "", "Optional reason for aborting." - - - - - - - -.. _ref_flyteidl.admin.ExecutionTerminateResponse: - -ExecutionTerminateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.ExecutionUpdateRequest: - -ExecutionUpdateRequest ------------------------------------------------------------------- - - - - - -.. csv-table:: ExecutionUpdateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Identifier of the execution to update" - "state", ":ref:`ref_flyteidl.admin.ExecutionState`", "", "State to set as the new value active/archive" - - - - - - - -.. 
_ref_flyteidl.admin.ExecutionUpdateResponse: - -ExecutionUpdateResponse ------------------------------------------------------------------- - - - - - - - - - - -.. _ref_flyteidl.admin.LiteralMapBlob: - -LiteralMapBlob ------------------------------------------------------------------- - -Input/output data can be represented by actual values or a link to where values are stored - - - -.. csv-table:: LiteralMapBlob type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Data in LiteralMap format" - "uri", ":ref:`ref_string`", "", "In the event that the map is too large, we return a uri to the data" - - - - - - - -.. _ref_flyteidl.admin.NotificationList: - -NotificationList ------------------------------------------------------------------- - - - - - -.. csv-table:: NotificationList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "notifications", ":ref:`ref_flyteidl.admin.Notification`", "repeated", "" - - - - - - - -.. _ref_flyteidl.admin.SystemMetadata: - -SystemMetadata ------------------------------------------------------------------- - -Represents system, rather than user-facing, metadata about an execution. - - - -.. csv-table:: SystemMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "execution_cluster", ":ref:`ref_string`", "", "Which execution cluster this execution ran on." - - - - - - - -.. _ref_flyteidl.admin.WorkflowExecutionGetDataRequest: - -WorkflowExecutionGetDataRequest ------------------------------------------------------------------- - -Request structure to fetch inputs, output and other data produced by an execution. -By default this data is not returned inline in :ref:`ref_flyteidl.admin.WorkflowExecutionGetRequest` - - - -.. 
csv-table:: WorkflowExecutionGetDataRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "The identifier of the execution for which to fetch inputs and outputs." - - - - - - - -.. _ref_flyteidl.admin.WorkflowExecutionGetDataResponse: - -WorkflowExecutionGetDataResponse ------------------------------------------------------------------- - -Response structure for WorkflowExecutionGetDataRequest which contains inputs and outputs for an execution. - - - -.. csv-table:: WorkflowExecutionGetDataResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "outputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of execution outputs. Deprecated: Please use full_outputs instead." - "inputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of execution inputs. Deprecated: Please use full_inputs instead." - "full_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_inputs will only be populated if they are under a configured size threshold." - "full_outputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_outputs will only be populated if they are under a configured size threshold." - - - - - - - -.. _ref_flyteidl.admin.WorkflowExecutionGetRequest: - -WorkflowExecutionGetRequest ------------------------------------------------------------------- - -A message used to fetch a single workflow execution entity. -See :ref:`ref_flyteidl.admin.Execution` for more details - - - -.. csv-table:: WorkflowExecutionGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Uniquely identifies an individual workflow execution." - - - - - - - - - -.. 
_ref_flyteidl.admin.ExecutionMetadata.ExecutionMode: - -ExecutionMetadata.ExecutionMode ------------------------------------------------------------------- - -The method by which this execution was launched. - -.. csv-table:: Enum ExecutionMetadata.ExecutionMode values - :header: "Name", "Number", "Description" - :widths: auto - - "MANUAL", "0", "The default execution mode, MANUAL implies that an execution was launched by an individual." - "SCHEDULED", "1", "A schedule triggered this execution launch." - "SYSTEM", "2", "A system process was responsible for launching this execution rather than an individual." - "RELAUNCH", "3", "This execution was launched with identical inputs as a previous execution." - "CHILD_WORKFLOW", "4", "This execution was triggered by another execution." - "RECOVERED", "5", "This execution was recovered from another execution." - - - -.. _ref_flyteidl.admin.ExecutionState: - -ExecutionState ------------------------------------------------------------------- - -The state of the execution is used to control its visibility in the UI/CLI. - -.. csv-table:: Enum ExecutionState values - :header: "Name", "Number", "Description" - :widths: auto - - "EXECUTION_ACTIVE", "0", "By default, all executions are considered active." - "EXECUTION_ARCHIVED", "1", "Archived executions are no longer visible in the UI." - - - - - - - - - - -.. _ref_flyteidl/admin/launch_plan.proto: - -flyteidl/admin/launch_plan.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.ActiveLaunchPlanListRequest: - -ActiveLaunchPlanListRequest ------------------------------------------------------------------- - -Represents a request structure to list active launch plans within a project/domain. -See :ref:`ref_flyteidl.admin.LaunchPlan` for more details - - - -.. 
csv-table:: ActiveLaunchPlanListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Name of the project that contains the identifiers. +required." - "domain", ":ref:`ref_string`", "", "Name of the domain the identifiers belongs to within the project. +required." - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required." - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" - - - - - - - -.. _ref_flyteidl.admin.ActiveLaunchPlanRequest: - -ActiveLaunchPlanRequest ------------------------------------------------------------------- - -Represents a request struct for finding an active launch plan for a given NamedEntityIdentifier -See :ref:`ref_flyteidl.admin.LaunchPlan` for more details - - - -.. csv-table:: ActiveLaunchPlanRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.admin.NamedEntityIdentifier`", "", "+required." - - - - - - - -.. _ref_flyteidl.admin.Auth: - -Auth ------------------------------------------------------------------- - -Defines permissions associated with executions created by this launch plan spec. -Use either of these roles when they have permissions required by your workflow execution. -Deprecated. - - - -.. csv-table:: Auth type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "assumable_iam_role", ":ref:`ref_string`", "", "Defines an optional iam role which will be used for tasks run in executions created with this launch plan." - "kubernetes_service_account", ":ref:`ref_string`", "", "Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan." - - - - - - - -.. 
_ref_flyteidl.admin.LaunchPlan: - -LaunchPlan ------------------------------------------------------------------- - -A LaunchPlan provides the capability to templatize workflow executions. -Launch plans simplify associating one or more schedules, inputs and notifications with your workflows. -Launch plans can be shared and used to trigger executions with predefined inputs even when a workflow -definition doesn't necessarily have a default value for said input. - - - -.. csv-table:: LaunchPlan type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Uniquely identifies a launch plan entity." - "spec", ":ref:`ref_flyteidl.admin.LaunchPlanSpec`", "", "User-provided launch plan details, including reference workflow, inputs and other metadata." - "closure", ":ref:`ref_flyteidl.admin.LaunchPlanClosure`", "", "Values computed by the flyte platform after launch plan registration." - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanClosure: - -LaunchPlanClosure ------------------------------------------------------------------- - -Values computed by the flyte platform after launch plan registration. -These include expected_inputs required to be present in a CreateExecutionRequest -to launch the reference workflow as well timestamp values associated with the launch plan. - - - -.. csv-table:: LaunchPlanClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "state", ":ref:`ref_flyteidl.admin.LaunchPlanState`", "", "Indicate the Launch plan state." - "expected_inputs", ":ref:`ref_flyteidl.core.ParameterMap`", "", "Indicates the set of inputs expected when creating an execution with the Launch plan" - "expected_outputs", ":ref:`ref_flyteidl.core.VariableMap`", "", "Indicates the set of outputs expected to be produced by creating an execution with the Launch plan" - "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the launch plan was created." 
- "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the launch plan was last updated." - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanCreateRequest: - -LaunchPlanCreateRequest ------------------------------------------------------------------- - -Request to register a launch plan. The included LaunchPlanSpec may have a complete or incomplete set of inputs required -to launch a workflow execution. By default all launch plans are registered in state INACTIVE. If you wish to -set the state to ACTIVE, you must submit a LaunchPlanUpdateRequest, after you have successfully created a launch plan. - - - -.. csv-table:: LaunchPlanCreateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Uniquely identifies a launch plan entity." - "spec", ":ref:`ref_flyteidl.admin.LaunchPlanSpec`", "", "User-provided launch plan details, including reference workflow, inputs and other metadata." - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanCreateResponse: - -LaunchPlanCreateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanList: - -LaunchPlanList ------------------------------------------------------------------- - -Response object for list launch plan requests. -See :ref:`ref_flyteidl.admin.LaunchPlan` for more details - - - -.. csv-table:: LaunchPlanList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "launch_plans", ":ref:`ref_flyteidl.admin.LaunchPlan`", "repeated", "" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. 
_ref_flyteidl.admin.LaunchPlanMetadata: - -LaunchPlanMetadata ------------------------------------------------------------------- - -Additional launch plan attributes included in the LaunchPlanSpec not strictly required to launch -the reference workflow. - - - -.. csv-table:: LaunchPlanMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "schedule", ":ref:`ref_flyteidl.admin.Schedule`", "", "Schedule to execute the Launch Plan" - "notifications", ":ref:`ref_flyteidl.admin.Notification`", "repeated", "List of notifications based on Execution status transitions" - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanSpec: - -LaunchPlanSpec ------------------------------------------------------------------- - -User-provided launch plan definition and configuration values. - - - -.. csv-table:: LaunchPlanSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "workflow_id", ":ref:`ref_flyteidl.core.Identifier`", "", "Reference to the Workflow template that the launch plan references" - "entity_metadata", ":ref:`ref_flyteidl.admin.LaunchPlanMetadata`", "", "Metadata for the Launch Plan" - "default_inputs", ":ref:`ref_flyteidl.core.ParameterMap`", "", "Input values to be passed for the execution. These can be overriden when an execution is created with this launch plan." - "fixed_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Fixed, non-overridable inputs for the Launch Plan. These can not be overriden when an execution is created with this launch plan." - "role", ":ref:`ref_string`", "", "**Deprecated.** String to indicate the role to use to execute the workflow underneath" - "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Custom labels to be applied to the execution resource." - "annotations", ":ref:`ref_flyteidl.admin.Annotations`", "", "Custom annotations to be applied to the execution resource." 
- "auth", ":ref:`ref_flyteidl.admin.Auth`", "", "**Deprecated.** Indicates the permission associated with workflow executions triggered with this launch plan." - "auth_role", ":ref:`ref_flyteidl.admin.AuthRole`", "", "**Deprecated.** " - "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "Indicates security context for permissions triggered with this launch plan" - "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "Indicates the runtime priority of the execution." - "raw_output_data_config", ":ref:`ref_flyteidl.admin.RawOutputDataConfig`", "", "Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.)." - "max_parallelism", ":ref:`ref_int32`", "", "Controls the maximum number of tasknodes that can be run in parallel for the entire workflow. This is useful to achieve fairness. Note: MapTasks are regarded as one unit, and parallelism/concurrency of MapTasks is independent from this." - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanUpdateRequest: - -LaunchPlanUpdateRequest ------------------------------------------------------------------- - -Request to set the referenced launch plan state to the configured value. -See :ref:`ref_flyteidl.admin.LaunchPlan` for more details - - - -.. csv-table:: LaunchPlanUpdateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Identifier of launch plan for which to change state. +required." - "state", ":ref:`ref_flyteidl.admin.LaunchPlanState`", "", "Desired state to apply to the launch plan. +required." - - - - - - - -.. _ref_flyteidl.admin.LaunchPlanUpdateResponse: - -LaunchPlanUpdateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - - - -.. 
_ref_flyteidl.admin.LaunchPlanState: - -LaunchPlanState ------------------------------------------------------------------- - -By default any launch plan regardless of state can be used to launch a workflow execution. -However, at most one version of a launch plan -(e.g. a NamedEntityIdentifier set of shared project, domain and name values) can be -active at a time in regards to *schedules*. That is, at most one schedule in a NamedEntityIdentifier -group will be observed and trigger executions at a defined cadence. - -.. csv-table:: Enum LaunchPlanState values - :header: "Name", "Number", "Description" - :widths: auto - - "INACTIVE", "0", "" - "ACTIVE", "1", "" - - - - - - - - - - -.. _ref_flyteidl/admin/matchable_resource.proto: - -flyteidl/admin/matchable_resource.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.ClusterResourceAttributes: - -ClusterResourceAttributes ------------------------------------------------------------------- - - - - - -.. csv-table:: ClusterResourceAttributes type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.ClusterResourceAttributes.AttributesEntry`", "repeated", "Custom resource attributes which will be applied in cluster resource creation (e.g. quotas). Map keys are the *case-sensitive* names of variables in templatized resource files. Map values should be the custom values which get substituted during resource creation." - - - - - - - -.. _ref_flyteidl.admin.ClusterResourceAttributes.AttributesEntry: - -ClusterResourceAttributes.AttributesEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: ClusterResourceAttributes.AttributesEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. 
_ref_flyteidl.admin.ExecutionClusterLabel: - -ExecutionClusterLabel ------------------------------------------------------------------- - - - - - -.. csv-table:: ExecutionClusterLabel type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_string`", "", "Label value to determine where the execution will be run" - - - - - - - -.. _ref_flyteidl.admin.ExecutionQueueAttributes: - -ExecutionQueueAttributes ------------------------------------------------------------------- - - - - - -.. csv-table:: ExecutionQueueAttributes type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tags", ":ref:`ref_string`", "repeated", "Tags used for assigning execution queues for tasks defined within this project." - - - - - - - -.. _ref_flyteidl.admin.ListMatchableAttributesRequest: - -ListMatchableAttributesRequest ------------------------------------------------------------------- - -Request all matching resource attributes for a resource type. -See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details - - - -.. csv-table:: ListMatchableAttributesRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "+required" - - - - - - - -.. _ref_flyteidl.admin.ListMatchableAttributesResponse: - -ListMatchableAttributesResponse ------------------------------------------------------------------- - -Response for a request for all matching resource attributes for a resource type. -See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details - - - -.. csv-table:: ListMatchableAttributesResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "configurations", ":ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`", "repeated", "" - - - - - - - -.. 
_ref_flyteidl.admin.MatchableAttributesConfiguration: - -MatchableAttributesConfiguration ------------------------------------------------------------------- - -Represents a custom set of attributes applied for either a domain; a domain and project; or -domain, project and workflow name. -These are used to override system level defaults for kubernetes cluster resource management, -default execution values, and more all across different levels of specificity. - - - -.. csv-table:: MatchableAttributesConfiguration type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", "" - "domain", ":ref:`ref_string`", "", "" - "project", ":ref:`ref_string`", "", "" - "workflow", ":ref:`ref_string`", "", "" - "launch_plan", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.admin.MatchingAttributes: - -MatchingAttributes ------------------------------------------------------------------- - -Generic container for encapsulating all types of the above attributes messages. - - - -.. csv-table:: MatchingAttributes type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "task_resource_attributes", ":ref:`ref_flyteidl.admin.TaskResourceAttributes`", "", "" - "cluster_resource_attributes", ":ref:`ref_flyteidl.admin.ClusterResourceAttributes`", "", "" - "execution_queue_attributes", ":ref:`ref_flyteidl.admin.ExecutionQueueAttributes`", "", "" - "execution_cluster_label", ":ref:`ref_flyteidl.admin.ExecutionClusterLabel`", "", "" - "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "" - "plugin_overrides", ":ref:`ref_flyteidl.admin.PluginOverrides`", "", "" - "workflow_execution_config", ":ref:`ref_flyteidl.admin.WorkflowExecutionConfig`", "", "" - "cluster_assignment", ":ref:`ref_flyteidl.admin.ClusterAssignment`", "", "" - - - - - - - -.. 
_ref_flyteidl.admin.PluginOverride: - -PluginOverride ------------------------------------------------------------------- - -This MatchableAttribute configures selecting alternate plugin implementations for a given task type. -In addition to an override implementation a selection of fallbacks can be provided or other modes -for handling cases where the desired plugin override is not enabled in a given Flyte deployment. - - - -.. csv-table:: PluginOverride type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "task_type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier." - "plugin_id", ":ref:`ref_string`", "repeated", "A set of plugin ids which should handle tasks of this type instead of the default registered plugin. The list will be tried in order until a plugin is found with that id." - "missing_plugin_behavior", ":ref:`ref_flyteidl.admin.PluginOverride.MissingPluginBehavior`", "", "Defines the behavior when no plugin from the plugin_id list is found." - - - - - - - -.. _ref_flyteidl.admin.PluginOverrides: - -PluginOverrides ------------------------------------------------------------------- - - - - - -.. csv-table:: PluginOverrides type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "overrides", ":ref:`ref_flyteidl.admin.PluginOverride`", "repeated", "" - - - - - - - -.. _ref_flyteidl.admin.TaskResourceAttributes: - -TaskResourceAttributes ------------------------------------------------------------------- - -Defines task resource defaults and limits that will be applied at task registration. - - - -.. csv-table:: TaskResourceAttributes type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "defaults", ":ref:`ref_flyteidl.admin.TaskResourceSpec`", "", "" - "limits", ":ref:`ref_flyteidl.admin.TaskResourceSpec`", "", "" - - - - - - - -.. 
_ref_flyteidl.admin.TaskResourceSpec: - -TaskResourceSpec ------------------------------------------------------------------- - -Defines a set of overridable task resource attributes set during task registration. - - - -.. csv-table:: TaskResourceSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cpu", ":ref:`ref_string`", "", "" - "gpu", ":ref:`ref_string`", "", "" - "memory", ":ref:`ref_string`", "", "" - "storage", ":ref:`ref_string`", "", "" - "ephemeral_storage", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.admin.WorkflowExecutionConfig: - -WorkflowExecutionConfig ------------------------------------------------------------------- - -Adds defaults for customizable workflow-execution specifications and overrides. - - - -.. csv-table:: WorkflowExecutionConfig type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "max_parallelism", ":ref:`ref_int32`", "", "Can be used to control the number of parallel nodes to run within the workflow. This is useful to achieve fairness." - "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "Indicates security context permissions for executions triggered with this matchable attribute." - "raw_output_data_config", ":ref:`ref_flyteidl.admin.RawOutputDataConfig`", "", "Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.)." - "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Custom labels to be applied to a triggered execution resource." - "annotations", ":ref:`ref_flyteidl.admin.Annotations`", "", "Custom annotations to be applied to a triggered execution resource." - - - - - - - - - -.. _ref_flyteidl.admin.MatchableResource: - -MatchableResource ------------------------------------------------------------------- - -Defines a resource that can be configured by customizable Project-, ProjectDomain- or WorkflowAttributes -based on matching tags. - -.. 
csv-table:: Enum MatchableResource values - :header: "Name", "Number", "Description" - :widths: auto - - "TASK_RESOURCE", "0", "Applies to customizable task resource requests and limits." - "CLUSTER_RESOURCE", "1", "Applies to configuring templated kubernetes cluster resources." - "EXECUTION_QUEUE", "2", "Configures task and dynamic task execution queue assignment." - "EXECUTION_CLUSTER_LABEL", "3", "Configures the K8s cluster label to be used for execution to be run" - "QUALITY_OF_SERVICE_SPECIFICATION", "4", "Configures default quality of service when undefined in an execution spec." - "PLUGIN_OVERRIDE", "5", "Selects configurable plugin implementation behavior for a given task type." - "WORKFLOW_EXECUTION_CONFIG", "6", "Adds defaults for customizable workflow-execution specifications and overrides." - "CLUSTER_ASSIGNMENT", "7", "Controls how to select an available cluster on which this execution should run." - - - -.. _ref_flyteidl.admin.PluginOverride.MissingPluginBehavior: - -PluginOverride.MissingPluginBehavior ------------------------------------------------------------------- - - - -.. csv-table:: Enum PluginOverride.MissingPluginBehavior values - :header: "Name", "Number", "Description" - :widths: auto - - "FAIL", "0", "By default, if this plugin is not enabled for a Flyte deployment then execution will fail." - "USE_DEFAULT", "1", "Uses the system-configured default implementation." - - - - - - - - - - -.. _ref_flyteidl/admin/node_execution.proto: - -flyteidl/admin/node_execution.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.DynamicWorkflowNodeMetadata: - -DynamicWorkflowNodeMetadata ------------------------------------------------------------------- - -For dynamic workflow nodes we capture information about the dynamic workflow definition that gets generated. - - - -.. 
csv-table:: DynamicWorkflowNodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow." - "compiled_workflow", ":ref:`ref_flyteidl.core.CompiledWorkflowClosure`", "", "Represents the compiled representation of the embedded dynamic workflow." - - - - - - - -.. _ref_flyteidl.admin.NodeExecution: - -NodeExecution ------------------------------------------------------------------- - -Encapsulates all details for a single node execution entity. -A node represents a component in the overall workflow graph. A node launch a task, multiple tasks, an entire nested -sub-workflow, or even a separate child-workflow execution. -The same task can be called repeatedly in a single workflow but each node is unique. - - - -.. csv-table:: NodeExecution type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Uniquely identifies an individual node execution." - "input_uri", ":ref:`ref_string`", "", "Path to remote data store where input blob is stored." - "closure", ":ref:`ref_flyteidl.admin.NodeExecutionClosure`", "", "Computed results associated with this node execution." - "metadata", ":ref:`ref_flyteidl.admin.NodeExecutionMetaData`", "", "Metadata for Node Execution" - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionClosure: - -NodeExecutionClosure ------------------------------------------------------------------- - -Container for node execution details and results. - - - -.. csv-table:: NodeExecutionClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "output_uri", ":ref:`ref_string`", "", "**Deprecated.** Links to a remotely stored, serialized core.LiteralMap of node execution outputs. DEPRECATED. Use GetNodeExecutionData to fetch output data instead." 
- "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the Node" - "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Raw output data produced by this node execution. DEPRECATED. Use GetNodeExecutionData to fetch output data instead." - "phase", ":ref:`ref_flyteidl.core.NodeExecution.Phase`", "", "The last recorded phase for this node execution." - "started_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the node execution began running." - "duration", ":ref:`ref_google.protobuf.Duration`", "", "The amount of time the node execution spent running." - "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the node execution was created." - "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the node execution was last updated." - "workflow_node_metadata", ":ref:`ref_flyteidl.admin.WorkflowNodeMetadata`", "", "" - "task_node_metadata", ":ref:`ref_flyteidl.admin.TaskNodeMetadata`", "", "" - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionForTaskListRequest: - -NodeExecutionForTaskListRequest ------------------------------------------------------------------- - -Represents a request structure to retrieve a list of node execution entities launched by a specific task. -This can arise when a task yields a subworkflow. - - - -.. csv-table:: NodeExecutionForTaskListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "task_execution_id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Indicates the node execution to filter by. +required" - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the, server-provided token can be used to fetch the next page in a query. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. 
More info on constructing filters : +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionGetDataRequest: - -NodeExecutionGetDataRequest ------------------------------------------------------------------- - -Request structure to fetch inputs and output for a node execution. -By default, these are not returned in :ref:`ref_flyteidl.admin.NodeExecutionGetRequest` - - - -.. csv-table:: NodeExecutionGetDataRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "The identifier of the node execution for which to fetch inputs and outputs." - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionGetDataResponse: - -NodeExecutionGetDataResponse ------------------------------------------------------------------- - -Response structure for NodeExecutionGetDataRequest which contains inputs and outputs for a node execution. - - - -.. csv-table:: NodeExecutionGetDataResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "inputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of node execution inputs. Deprecated: Please use full_inputs instead." - "outputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of node execution outputs. Deprecated: Please use full_outputs instead." - "full_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_inputs will only be populated if they are under a configured size threshold." - "full_outputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_outputs will only be populated if they are under a configured size threshold." 
- "dynamic_workflow", ":ref:`ref_flyteidl.admin.DynamicWorkflowNodeMetadata`", "", "Optional Workflow closure for a dynamically generated workflow, in the case this node yields a dynamic workflow we return its structure here." - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionGetRequest: - -NodeExecutionGetRequest ------------------------------------------------------------------- - -A message used to fetch a single node execution entity. -See :ref:`ref_flyteidl.admin.NodeExecution` for more details - - - -.. csv-table:: NodeExecutionGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Uniquely identifies an individual node execution. +required" - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionList: - -NodeExecutionList ------------------------------------------------------------------- - -Request structure to retrieve a list of node execution entities. -See :ref:`ref_flyteidl.admin.NodeExecution` for more details - - - -.. csv-table:: NodeExecutionList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "node_executions", ":ref:`ref_flyteidl.admin.NodeExecution`", "repeated", "" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionListRequest: - -NodeExecutionListRequest ------------------------------------------------------------------- - -Represents a request structure to retrieve a list of node execution entities. -See :ref:`ref_flyteidl.admin.NodeExecution` for more details - - - -.. 
csv-table:: NodeExecutionListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "workflow_execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Indicates the workflow execution to filter by. +required" - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" - "token", ":ref:`ref_string`", "", "" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" - "unique_parent_id", ":ref:`ref_string`", "", "Unique identifier of the parent node in the execution +optional" - - - - - - - -.. _ref_flyteidl.admin.NodeExecutionMetaData: - -NodeExecutionMetaData ------------------------------------------------------------------- - -Represents additional attributes related to a Node Execution - - - -.. csv-table:: NodeExecutionMetaData type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "retry_group", ":ref:`ref_string`", "", "Node executions are grouped depending on retries of the parent Retry group is unique within the context of a parent node." - "is_parent_node", ":ref:`ref_bool`", "", "Boolean flag indicating if the node has child nodes under it This can be true when a node contains a dynamic workflow which then produces child nodes." - "spec_node_id", ":ref:`ref_string`", "", "Node id of the node in the original workflow This maps to value of WorkflowTemplate.nodes[X].id" - "is_dynamic", ":ref:`ref_bool`", "", "Boolean flag indicating if the node has contains a dynamic workflow which then produces child nodes. This is to distinguish between subworkflows and dynamic workflows which can both have is_parent_node as true." - - - - - - - -.. 
_ref_flyteidl.admin.TaskNodeMetadata: - -TaskNodeMetadata ------------------------------------------------------------------- - -Metadata for the case in which the node is a TaskNode - - - -.. csv-table:: TaskNodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cache_status", ":ref:`ref_flyteidl.core.CatalogCacheStatus`", "", "Captures the status of caching for this execution." - "catalog_key", ":ref:`ref_flyteidl.core.CatalogMetadata`", "", "This structure carries the catalog artifact information" - - - - - - - -.. _ref_flyteidl.admin.WorkflowNodeMetadata: - -WorkflowNodeMetadata ------------------------------------------------------------------- - -Metadata for a WorkflowNode - - - -.. csv-table:: WorkflowNodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "executionId", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "The identifier for a workflow execution launched by a node." - - - - - - - - - - - - - - - - -.. _ref_flyteidl/admin/notification.proto: - -flyteidl/admin/notification.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.EmailMessage: - -EmailMessage ------------------------------------------------------------------- - -Represents the Email object that is sent to a publisher/subscriber -to forward the notification. -Note: This is internal to Admin and doesn't need to be exposed to other components. - - - -.. csv-table:: EmailMessage type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "recipients_email", ":ref:`ref_string`", "repeated", "The list of email addresses to receive an email with the content populated in the other fields. Currently, each email recipient will receive its own email. This populates the TO field." - "sender_email", ":ref:`ref_string`", "", "The email of the sender. This populates the FROM field." 
- "subject_line", ":ref:`ref_string`", "", "The content of the subject line. This populates the SUBJECT field." - "body", ":ref:`ref_string`", "", "The content of the email body. This populates the BODY field." - - - - - - - - - - - - - - - - -.. _ref_flyteidl/admin/project.proto: - -flyteidl/admin/project.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.Domain: - -Domain ------------------------------------------------------------------- - -Namespace within a project commonly used to differentiate between different service instances. -e.g. "production", "development", etc. - - - -.. csv-table:: Domain type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_string`", "", "Globally unique domain name." - "name", ":ref:`ref_string`", "", "Display name." - - - - - - - -.. _ref_flyteidl.admin.Project: - -Project ------------------------------------------------------------------- - -Top-level namespace used to classify different entities like workflows and executions. - - - -.. csv-table:: Project type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_string`", "", "Globally unique project name." - "name", ":ref:`ref_string`", "", "Display name." - "domains", ":ref:`ref_flyteidl.admin.Domain`", "repeated", "" - "description", ":ref:`ref_string`", "", "" - "labels", ":ref:`ref_flyteidl.admin.Labels`", "", "Leverage Labels from flyteidel.admin.common.proto to tag projects with ownership information." - "state", ":ref:`ref_flyteidl.admin.Project.ProjectState`", "", "" - - - - - - - -.. _ref_flyteidl.admin.ProjectListRequest: - -ProjectListRequest ------------------------------------------------------------------- - -Request to retrieve a list of projects matching specified filters. -See :ref:`ref_flyteidl.admin.Project` for more details - - - -.. 
csv-table:: ProjectListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "limit", ":ref:`ref_uint32`", "", "Indicates the number of projects to be returned. +required" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering. +optional" - - - - - - - -.. _ref_flyteidl.admin.ProjectRegisterRequest: - -ProjectRegisterRequest ------------------------------------------------------------------- - -Adds a new user-project within the Flyte deployment. -See :ref:`ref_flyteidl.admin.Project` for more details - - - -.. csv-table:: ProjectRegisterRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_flyteidl.admin.Project`", "", "+required" - - - - - - - -.. _ref_flyteidl.admin.ProjectRegisterResponse: - -ProjectRegisterResponse ------------------------------------------------------------------- - -Purposefully empty, may be updated in the future. - - - - - - - - -.. _ref_flyteidl.admin.ProjectUpdateResponse: - -ProjectUpdateResponse ------------------------------------------------------------------- - -Purposefully empty, may be updated in the future. - - - - - - - - -.. _ref_flyteidl.admin.Projects: - -Projects ------------------------------------------------------------------- - -Represents a list of projects. -See :ref:`ref_flyteidl.admin.Project` for more details - - - -.. 
csv-table:: Projects type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "projects", ":ref:`ref_flyteidl.admin.Project`", "repeated", "" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - - - -.. _ref_flyteidl.admin.Project.ProjectState: - -Project.ProjectState ------------------------------------------------------------------- - -The state of the project is used to control its visibility in the UI and validity. - -.. csv-table:: Enum Project.ProjectState values - :header: "Name", "Number", "Description" - :widths: auto - - "ACTIVE", "0", "By default, all projects are considered active." - "ARCHIVED", "1", "Archived projects are no longer visible in the UI and no longer valid." - "SYSTEM_GENERATED", "2", "System generated projects that aren't explicitly created or managed by a user." - - - - - - - - - - -.. _ref_flyteidl/admin/project_domain_attributes.proto: - -flyteidl/admin/project_domain_attributes.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.ProjectDomainAttributes: - -ProjectDomainAttributes ------------------------------------------------------------------- - -Defines a set of custom matching attributes which defines resource defaults for a project and domain. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: ProjectDomainAttributes type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id for which this set of attributes will be applied." - "domain", ":ref:`ref_string`", "", "Unique domain id for which this set of attributes will be applied." - "matching_attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", "" - - - - - - - -.. 
_ref_flyteidl.admin.ProjectDomainAttributesDeleteRequest: - -ProjectDomainAttributesDeleteRequest ------------------------------------------------------------------- - -Request to delete a set matchable project domain attribute override. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: ProjectDomainAttributesDeleteRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" - "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required" - "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to delete. +required" - - - - - - - -.. _ref_flyteidl.admin.ProjectDomainAttributesDeleteResponse: - -ProjectDomainAttributesDeleteResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.ProjectDomainAttributesGetRequest: - -ProjectDomainAttributesGetRequest ------------------------------------------------------------------- - -Request to get an individual project domain attribute override. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: ProjectDomainAttributesGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" - "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required" - "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to return. +required" - - - - - - - -.. 
_ref_flyteidl.admin.ProjectDomainAttributesGetResponse: - -ProjectDomainAttributesGetResponse ------------------------------------------------------------------- - -Response to get an individual project domain attribute override. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: ProjectDomainAttributesGetResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributes`", "", "" - - - - - - - -.. _ref_flyteidl.admin.ProjectDomainAttributesUpdateRequest: - -ProjectDomainAttributesUpdateRequest ------------------------------------------------------------------- - -Sets custom attributes for a project-domain combination. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: ProjectDomainAttributesUpdateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.ProjectDomainAttributes`", "", "+required" - - - - - - - -.. _ref_flyteidl.admin.ProjectDomainAttributesUpdateResponse: - -ProjectDomainAttributesUpdateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - - - - - - - - - - -.. _ref_flyteidl/admin/schedule.proto: - -flyteidl/admin/schedule.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.CronSchedule: - -CronSchedule ------------------------------------------------------------------- - -Options for schedules to run according to a cron expression. - - - -.. 
csv-table:: CronSchedule type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "schedule", ":ref:`ref_string`", "", "Standard/default cron implementation as described by https://en.wikipedia.org/wiki/Cron#CRON_expression; Also supports nonstandard predefined scheduling definitions as described by https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html#CronExpressions except @reboot" - "offset", ":ref:`ref_string`", "", "ISO 8601 duration as described by https://en.wikipedia.org/wiki/ISO_8601#Durations" - - - - - - - -.. _ref_flyteidl.admin.FixedRate: - -FixedRate ------------------------------------------------------------------- - -Option for schedules run at a certain frequency e.g. every 2 minutes. - - - -.. csv-table:: FixedRate type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_uint32`", "", "" - "unit", ":ref:`ref_flyteidl.admin.FixedRateUnit`", "", "" - - - - - - - -.. _ref_flyteidl.admin.Schedule: - -Schedule ------------------------------------------------------------------- - -Defines complete set of information required to trigger an execution on a schedule. - - - -.. csv-table:: Schedule type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cron_expression", ":ref:`ref_string`", "", "**Deprecated.** Uses AWS syntax: Minutes Hours Day-of-month Month Day-of-week Year e.g. for a schedule that runs every 15 minutes: 0/15 * * * ? *" - "rate", ":ref:`ref_flyteidl.admin.FixedRate`", "", "" - "cron_schedule", ":ref:`ref_flyteidl.admin.CronSchedule`", "", "" - "kickoff_time_input_arg", ":ref:`ref_string`", "", "Name of the input variable that the kickoff time will be supplied to when the workflow is kicked off." - - - - - - - - - -.. _ref_flyteidl.admin.FixedRateUnit: - -FixedRateUnit ------------------------------------------------------------------- - -Represents a frequency at which to run a schedule. - -.. 
csv-table:: Enum FixedRateUnit values - :header: "Name", "Number", "Description" - :widths: auto - - "MINUTE", "0", "" - "HOUR", "1", "" - "DAY", "2", "" - - - - - - - - - - -.. _ref_flyteidl/admin/task.proto: - -flyteidl/admin/task.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.Task: - -Task ------------------------------------------------------------------- - -Flyte workflows are composed of many ordered tasks. That is small, reusable, self-contained logical blocks -arranged to process workflow inputs and produce a deterministic set of outputs. -Tasks can come in many varieties tuned for specialized behavior. - - - -.. csv-table:: Task type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the task." - "closure", ":ref:`ref_flyteidl.admin.TaskClosure`", "", "closure encapsulates all the fields that maps to a compiled version of the task." - - - - - - - -.. _ref_flyteidl.admin.TaskClosure: - -TaskClosure ------------------------------------------------------------------- - -Compute task attributes which include values derived from the TaskSpec, as well as plugin-specific data -and task metadata. - - - -.. csv-table:: TaskClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "compiled_task", ":ref:`ref_flyteidl.core.CompiledTask`", "", "Represents the compiled representation of the task from the specification provided." - "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task was created." - - - - - - - -.. _ref_flyteidl.admin.TaskCreateRequest: - -TaskCreateRequest ------------------------------------------------------------------- - -Represents a request structure to create a revision of a task. -See :ref:`ref_flyteidl.admin.Task` for more details - - - -.. 
csv-table:: TaskCreateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the task. +required" - "spec", ":ref:`ref_flyteidl.admin.TaskSpec`", "", "Represents the specification for task. +required" - - - - - - - -.. _ref_flyteidl.admin.TaskCreateResponse: - -TaskCreateResponse ------------------------------------------------------------------- - -Represents a response structure if task creation succeeds. - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.TaskList: - -TaskList ------------------------------------------------------------------- - -Represents a list of tasks returned from the admin. -See :ref:`ref_flyteidl.admin.Task` for more details - - - -.. csv-table:: TaskList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tasks", ":ref:`ref_flyteidl.admin.Task`", "repeated", "A list of tasks returned based on the request." - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.TaskSpec: - -TaskSpec ------------------------------------------------------------------- - -Represents a structure that encapsulates the user-configured specification of the task. - - - -.. csv-table:: TaskSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "template", ":ref:`ref_flyteidl.core.TaskTemplate`", "", "Template of the task that encapsulates all the metadata of the task." - - - - - - - - - - - - - - - - -.. _ref_flyteidl/admin/task_execution.proto: - -flyteidl/admin/task_execution.proto -================================================================== - - - - - -.. 
_ref_flyteidl.admin.TaskExecution: - -TaskExecution ------------------------------------------------------------------- - -Encapsulates all details for a single task execution entity. -A task execution represents an instantiated task, including all inputs and additional -metadata as well as computed results included state, outputs, and duration-based attributes. - - - -.. csv-table:: TaskExecution type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Unique identifier for the task execution." - "input_uri", ":ref:`ref_string`", "", "Path to remote data store where input blob is stored." - "closure", ":ref:`ref_flyteidl.admin.TaskExecutionClosure`", "", "Task execution details and results." - "is_parent", ":ref:`ref_bool`", "", "Whether this task spawned nodes." - - - - - - - -.. _ref_flyteidl.admin.TaskExecutionClosure: - -TaskExecutionClosure ------------------------------------------------------------------- - -Container for task execution details and results. - - - -.. csv-table:: TaskExecutionClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "output_uri", ":ref:`ref_string`", "", "**Deprecated.** Path to remote data store where output blob is stored if the execution succeeded (and produced outputs). DEPRECATED. Use GetTaskExecutionData to fetch output data instead." - "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the task execution. Populated if the execution failed." - "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "**Deprecated.** Raw output data produced by this task execution. DEPRECATED. Use GetTaskExecutionData to fetch output data instead." - "phase", ":ref:`ref_flyteidl.core.TaskExecution.Phase`", "", "The last recorded phase for this task execution." - "logs", ":ref:`ref_flyteidl.core.TaskLog`", "repeated", "Detailed log information output by the task execution." 
- "started_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task execution began running." - "duration", ":ref:`ref_google.protobuf.Duration`", "", "The amount of time the task execution spent running." - "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task execution was created." - "updated_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the task execution was last updated." - "custom_info", ":ref:`ref_google.protobuf.Struct`", "", "Custom data specific to the task plugin." - "reason", ":ref:`ref_string`", "", "If there is an explanation for the most recent phase transition, the reason will capture it." - "task_type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier." - "metadata", ":ref:`ref_flyteidl.event.TaskExecutionMetadata`", "", "Metadata around how a task was executed." - - - - - - - -.. _ref_flyteidl.admin.TaskExecutionGetDataRequest: - -TaskExecutionGetDataRequest ------------------------------------------------------------------- - -Request structure to fetch inputs and output for a task execution. -By default this data is not returned inline in :ref:`ref_flyteidl.admin.TaskExecutionGetRequest` - - - -.. csv-table:: TaskExecutionGetDataRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "The identifier of the task execution for which to fetch inputs and outputs. +required" - - - - - - - -.. _ref_flyteidl.admin.TaskExecutionGetDataResponse: - -TaskExecutionGetDataResponse ------------------------------------------------------------------- - -Response structure for TaskExecutionGetDataRequest which contains inputs and outputs for a task execution. - - - -.. 
csv-table:: TaskExecutionGetDataResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "inputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of task execution inputs. Deprecated: Please use full_inputs instead." - "outputs", ":ref:`ref_flyteidl.admin.UrlBlob`", "", "**Deprecated.** Signed url to fetch a core.LiteralMap of task execution outputs. Deprecated: Please use full_outputs instead." - "full_inputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_inputs will only be populated if they are under a configured size threshold." - "full_outputs", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Full_outputs will only be populated if they are under a configured size threshold." - - - - - - - -.. _ref_flyteidl.admin.TaskExecutionGetRequest: - -TaskExecutionGetRequest ------------------------------------------------------------------- - -A message used to fetch a single task execution entity. -See :ref:`ref_flyteidl.admin.TaskExecution` for more details - - - -.. csv-table:: TaskExecutionGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Unique identifier for the task execution. +required" - - - - - - - -.. _ref_flyteidl.admin.TaskExecutionList: - -TaskExecutionList ------------------------------------------------------------------- - -Response structure for a query to list of task execution entities. -See :ref:`ref_flyteidl.admin.TaskExecution` for more details - - - -.. csv-table:: TaskExecutionList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "task_executions", ":ref:`ref_flyteidl.admin.TaskExecution`", "repeated", "" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." 
- - - - - - - -.. _ref_flyteidl.admin.TaskExecutionListRequest: - -TaskExecutionListRequest ------------------------------------------------------------------- - -Represents a request structure to retrieve a list of task execution entities yielded by a specific node execution. -See :ref:`ref_flyteidl.admin.TaskExecution` for more details - - - -.. csv-table:: TaskExecutionListRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Indicates the node execution to filter by. +required" - "limit", ":ref:`ref_uint32`", "", "Indicates the number of resources to be returned. +required" - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional" - "filters", ":ref:`ref_string`", "", "Indicates a list of filters passed as string. More info on constructing filters : +optional" - "sort_by", ":ref:`ref_flyteidl.admin.Sort`", "", "Sort ordering for returned list. +optional" - - - - - - - - - - - - - - - - -.. _ref_flyteidl/admin/version.proto: - -flyteidl/admin/version.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.GetVersionRequest: - -GetVersionRequest ------------------------------------------------------------------- - -Empty request for GetVersion - - - - - - - - -.. _ref_flyteidl.admin.GetVersionResponse: - -GetVersionResponse ------------------------------------------------------------------- - -Response for the GetVersion API - - - -.. csv-table:: GetVersionResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "control_plane_version", ":ref:`ref_flyteidl.admin.Version`", "", "The control plane version information. FlyteAdmin and related components form the control plane of Flyte" - - - - - - - -.. 
_ref_flyteidl.admin.Version: - -Version ------------------------------------------------------------------- - -Provides Version information for a component - - - -.. csv-table:: Version type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "Build", ":ref:`ref_string`", "", "Specifies the GIT sha of the build" - "Version", ":ref:`ref_string`", "", "Version for the build, should follow a semver" - "BuildTime", ":ref:`ref_string`", "", "Build timestamp" - - - - - - - - - - - - - - - - -.. _ref_flyteidl/admin/workflow.proto: - -flyteidl/admin/workflow.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.Workflow: - -Workflow ------------------------------------------------------------------- - -Represents the workflow structure stored in the Admin -A workflow is created by ordering tasks and associating outputs to inputs -in order to produce a directed-acyclic execution graph. - - - -.. csv-table:: Workflow type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow." - "closure", ":ref:`ref_flyteidl.admin.WorkflowClosure`", "", "closure encapsulates all the fields that maps to a compiled version of the workflow." - - - - - - - -.. _ref_flyteidl.admin.WorkflowClosure: - -WorkflowClosure ------------------------------------------------------------------- - -A container holding the compiled workflow produced from the WorkflowSpec and additional metadata. - - - -.. csv-table:: WorkflowClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "compiled_workflow", ":ref:`ref_flyteidl.core.CompiledWorkflowClosure`", "", "Represents the compiled representation of the workflow from the specification provided." - "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Time at which the workflow was created." - - - - - - - -.. 
_ref_flyteidl.admin.WorkflowCreateRequest: - -WorkflowCreateRequest ------------------------------------------------------------------- - -Represents a request structure to create a revision of a workflow. -See :ref:`ref_flyteidl.admin.Workflow` for more details - - - -.. csv-table:: WorkflowCreateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow. +required" - "spec", ":ref:`ref_flyteidl.admin.WorkflowSpec`", "", "Represents the specification for workflow. +required" - - - - - - - -.. _ref_flyteidl.admin.WorkflowCreateResponse: - -WorkflowCreateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.WorkflowList: - -WorkflowList ------------------------------------------------------------------- - -Represents a list of workflows returned from the admin. -See :ref:`ref_flyteidl.admin.Workflow` for more details - - - -.. csv-table:: WorkflowList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "workflows", ":ref:`ref_flyteidl.admin.Workflow`", "repeated", "A list of workflows returned based on the request." - "token", ":ref:`ref_string`", "", "In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. If there are no more results, this value will be empty." - - - - - - - -.. _ref_flyteidl.admin.WorkflowSpec: - -WorkflowSpec ------------------------------------------------------------------- - -Represents a structure that encapsulates the specification of the workflow. - - - -.. csv-table:: WorkflowSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "template", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "", "Template of the task that encapsulates all the metadata of the workflow." 
- "sub_workflows", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "repeated", "Workflows that are embedded into other workflows need to be passed alongside the parent workflow to the propeller compiler (since the compiler doesn't have any knowledge of other workflows - ie, it doesn't reach out to Admin to see other registered workflows). In fact, subworkflows do not even need to be registered." - - - - - - - - - - - - - - - - -.. _ref_flyteidl/admin/workflow_attributes.proto: - -flyteidl/admin/workflow_attributes.proto -================================================================== - - - - - -.. _ref_flyteidl.admin.WorkflowAttributes: - -WorkflowAttributes ------------------------------------------------------------------- - -Defines a set of custom matching attributes which defines resource defaults for a project, domain and workflow. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: WorkflowAttributes type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id for which this set of attributes will be applied." - "domain", ":ref:`ref_string`", "", "Unique domain id for which this set of attributes will be applied." - "workflow", ":ref:`ref_string`", "", "Workflow name for which this set of attributes will be applied." - "matching_attributes", ":ref:`ref_flyteidl.admin.MatchingAttributes`", "", "" - - - - - - - -.. _ref_flyteidl.admin.WorkflowAttributesDeleteRequest: - -WorkflowAttributesDeleteRequest ------------------------------------------------------------------- - -Request to delete a set matchable workflow attribute override. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. 
csv-table:: WorkflowAttributesDeleteRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" - "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required" - "workflow", ":ref:`ref_string`", "", "Workflow name which this set of attributes references. +required" - "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to delete. +required" - - - - - - - -.. _ref_flyteidl.admin.WorkflowAttributesDeleteResponse: - -WorkflowAttributesDeleteResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - -.. _ref_flyteidl.admin.WorkflowAttributesGetRequest: - -WorkflowAttributesGetRequest ------------------------------------------------------------------- - -Request to get an individual workflow attribute override. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: WorkflowAttributesGetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Unique project id which this set of attributes references. +required" - "domain", ":ref:`ref_string`", "", "Unique domain id which this set of attributes references. +required" - "workflow", ":ref:`ref_string`", "", "Workflow name which this set of attributes references. +required" - "resource_type", ":ref:`ref_flyteidl.admin.MatchableResource`", "", "Which type of matchable attributes to return. +required" - - - - - - - -.. _ref_flyteidl.admin.WorkflowAttributesGetResponse: - -WorkflowAttributesGetResponse ------------------------------------------------------------------- - -Response to get an individual workflow attribute override. - - - -.. 
csv-table:: WorkflowAttributesGetResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.WorkflowAttributes`", "", "" - - - - - - - -.. _ref_flyteidl.admin.WorkflowAttributesUpdateRequest: - -WorkflowAttributesUpdateRequest ------------------------------------------------------------------- - -Sets custom attributes for a project, domain and workflow combination. -For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` - - - -.. csv-table:: WorkflowAttributesUpdateRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "attributes", ":ref:`ref_flyteidl.admin.WorkflowAttributes`", "", "" - - - - - - - -.. _ref_flyteidl.admin.WorkflowAttributesUpdateResponse: - -WorkflowAttributesUpdateResponse ------------------------------------------------------------------- - -Purposefully empty, may be populated in the future. - - - - - - - - - - - - - - - - - -.. _ref_google/protobuf/duration.proto: - -google/protobuf/duration.proto -================================================================== - - - - - -.. _ref_google.protobuf.Duration: - -Duration ------------------------------------------------------------------- - -A Duration represents a signed, fixed-length span of time represented -as a count of seconds and fractions of seconds at nanosecond -resolution. It is independent of any calendar and concepts like "day" -or "month". It is related to Timestamp in that the difference between -two Timestamp values is a Duration and it can be added or subtracted -from a Timestamp. Range is approximately +-10,000 years. - -# Examples - -Example 1: Compute Duration from two Timestamps in pseudo code. 
- - Timestamp start = ...; - Timestamp end = ...; - Duration duration = ...; - - duration.seconds = end.seconds - start.seconds; - duration.nanos = end.nanos - start.nanos; - - if (duration.seconds < 0 && duration.nanos > 0) { - duration.seconds += 1; - duration.nanos -= 1000000000; - } else if (duration.seconds > 0 && duration.nanos < 0) { - duration.seconds -= 1; - duration.nanos += 1000000000; - } - -Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. - - Timestamp start = ...; - Duration duration = ...; - Timestamp end = ...; - - end.seconds = start.seconds + duration.seconds; - end.nanos = start.nanos + duration.nanos; - - if (end.nanos < 0) { - end.seconds -= 1; - end.nanos += 1000000000; - } else if (end.nanos >= 1000000000) { - end.seconds += 1; - end.nanos -= 1000000000; - } - -Example 3: Compute Duration from datetime.timedelta in Python. - - td = datetime.timedelta(days=3, minutes=10) - duration = Duration() - duration.FromTimedelta(td) - -# JSON Mapping - -In JSON format, the Duration type is encoded as a string rather than an -object, where the string ends in the suffix "s" (indicating seconds) and -is preceded by the number of seconds, with nanoseconds expressed as -fractional seconds. For example, 3 seconds with 0 nanoseconds should be -encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -microsecond should be expressed in JSON format as "3.000001s". - - - -.. csv-table:: Duration type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years" - "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. 
Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive." - - - - - - - - - - - - - - diff --git a/flyteidl/protos/docs/core/core.rst b/flyteidl/protos/docs/core/core.rst deleted file mode 100644 index 31e8d3f3c..000000000 --- a/flyteidl/protos/docs/core/core.rst +++ /dev/null @@ -1,3656 +0,0 @@ -###################### -Protocol Documentation -###################### - - - - -.. _ref_flyteidl/core/catalog.proto: - -flyteidl/core/catalog.proto -================================================================== - - - - - -.. _ref_flyteidl.core.CatalogArtifactTag: - -CatalogArtifactTag ------------------------------------------------------------------- - - - - - -.. csv-table:: CatalogArtifactTag type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "artifact_id", ":ref:`ref_string`", "", "Artifact ID is generated name" - "name", ":ref:`ref_string`", "", "Flyte computes the tag automatically, as the hash of the values" - - - - - - - -.. _ref_flyteidl.core.CatalogMetadata: - -CatalogMetadata ------------------------------------------------------------------- - -Catalog artifact information with specific metadata - - - -.. csv-table:: CatalogMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset_id", ":ref:`ref_flyteidl.core.Identifier`", "", "Dataset ID in the catalog" - "artifact_tag", ":ref:`ref_flyteidl.core.CatalogArtifactTag`", "", "Artifact tag in the catalog" - "source_task_execution", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "Today we only support TaskExecutionIdentifier as a source, as catalog caching only works for task executions" - - - - - - - -.. 
_ref_flyteidl.core.CatalogReservation: - -CatalogReservation ------------------------------------------------------------------- - - - - - - - - - - - - -.. _ref_flyteidl.core.CatalogCacheStatus: - -CatalogCacheStatus ------------------------------------------------------------------- - -Indicates the status of CatalogCaching. The reason why this is not embedded in TaskNodeMetadata is, that we may use for other types of nodes as well in the future - -.. csv-table:: Enum CatalogCacheStatus values - :header: "Name", "Number", "Description" - :widths: auto - - "CACHE_DISABLED", "0", "Used to indicate that caching was disabled" - "CACHE_MISS", "1", "Used to indicate that the cache lookup resulted in no matches" - "CACHE_HIT", "2", "used to indicate that the associated artifact was a result of a previous execution" - "CACHE_POPULATED", "3", "used to indicate that the resultant artifact was added to the cache" - "CACHE_LOOKUP_FAILURE", "4", "Used to indicate that cache lookup failed because of an error" - "CACHE_PUT_FAILURE", "5", "Used to indicate that cache lookup failed because of an error" - - - -.. _ref_flyteidl.core.CatalogReservation.Status: - -CatalogReservation.Status ------------------------------------------------------------------- - -Indicates the status of a catalog reservation operation. - -.. csv-table:: Enum CatalogReservation.Status values - :header: "Name", "Number", "Description" - :widths: auto - - "RESERVATION_DISABLED", "0", "Used to indicate that reservations are disabled" - "RESERVATION_ACQUIRED", "1", "Used to indicate that a reservation was successfully acquired or extended" - "RESERVATION_EXISTS", "2", "Used to indicate that an active reservation currently exists" - "RESERVATION_RELEASED", "3", "Used to indicate that the reservation has been successfully released" - "RESERVATION_FAILURE", "4", "Used to indicate that a reservation operation resulted in failure" - - - - - - - - - - -.. 
_ref_flyteidl/core/compiler.proto: - -flyteidl/core/compiler.proto -================================================================== - - - - - -.. _ref_flyteidl.core.CompiledTask: - -CompiledTask ------------------------------------------------------------------- - -Output of the Compilation step. This object represent one Task. We store more metadata at this layer - - - -.. csv-table:: CompiledTask type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "template", ":ref:`ref_flyteidl.core.TaskTemplate`", "", "Completely contained TaskTemplate" - - - - - - - -.. _ref_flyteidl.core.CompiledWorkflow: - -CompiledWorkflow ------------------------------------------------------------------- - -Output of the compilation Step. This object represents one workflow. We store more metadata at this layer - - - -.. csv-table:: CompiledWorkflow type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "template", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "", "Completely contained Workflow Template" - "connections", ":ref:`ref_flyteidl.core.ConnectionSet`", "", "For internal use only! This field is used by the system and must not be filled in. Any values set will be ignored." - - - - - - - -.. _ref_flyteidl.core.CompiledWorkflowClosure: - -CompiledWorkflowClosure ------------------------------------------------------------------- - -A Compiled Workflow Closure contains all the information required to start a new execution, or to visualize a workflow -and its details. The CompiledWorkflowClosure should always contain a primary workflow, that is the main workflow that -will being the execution. All subworkflows are denormalized. WorkflowNodes refer to the workflow identifiers of -compiled subworkflows. - - - -.. 
csv-table:: CompiledWorkflowClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "primary", ":ref:`ref_flyteidl.core.CompiledWorkflow`", "", "+required" - "sub_workflows", ":ref:`ref_flyteidl.core.CompiledWorkflow`", "repeated", "Guaranteed that there will only exist one and only one workflow with a given id, i.e., every sub workflow has a unique identifier. Also every enclosed subworkflow is used either by a primary workflow or by a subworkflow as an inlined workflow +optional" - "tasks", ":ref:`ref_flyteidl.core.CompiledTask`", "repeated", "Guaranteed that there will only exist one and only one task with a given id, i.e., every task has a unique id +required (at least 1)" - - - - - - - -.. _ref_flyteidl.core.ConnectionSet: - -ConnectionSet ------------------------------------------------------------------- - -Adjacency list for the workflow. This is created as part of the compilation process. Every process after the compilation -step uses this created ConnectionSet - - - -.. csv-table:: ConnectionSet type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "downstream", ":ref:`ref_flyteidl.core.ConnectionSet.DownstreamEntry`", "repeated", "A list of all the node ids that are downstream from a given node id" - "upstream", ":ref:`ref_flyteidl.core.ConnectionSet.UpstreamEntry`", "repeated", "A list of all the node ids, that are upstream of this node id" - - - - - - - -.. _ref_flyteidl.core.ConnectionSet.DownstreamEntry: - -ConnectionSet.DownstreamEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: ConnectionSet.DownstreamEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.ConnectionSet.IdList`", "", "" - - - - - - - -.. 
_ref_flyteidl.core.ConnectionSet.IdList: - -ConnectionSet.IdList ------------------------------------------------------------------- - - - - - -.. csv-table:: ConnectionSet.IdList type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "ids", ":ref:`ref_string`", "repeated", "" - - - - - - - -.. _ref_flyteidl.core.ConnectionSet.UpstreamEntry: - -ConnectionSet.UpstreamEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: ConnectionSet.UpstreamEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.ConnectionSet.IdList`", "", "" - - - - - - - - - - - - - - - - -.. _ref_flyteidl/core/condition.proto: - -flyteidl/core/condition.proto -================================================================== - - - - - -.. _ref_flyteidl.core.BooleanExpression: - -BooleanExpression ------------------------------------------------------------------- - -Defines a boolean expression tree. It can be a simple or a conjunction expression. -Multiple expressions can be combined using a conjunction or a disjunction to result in a final boolean result. - - - -.. csv-table:: BooleanExpression type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "conjunction", ":ref:`ref_flyteidl.core.ConjunctionExpression`", "", "" - "comparison", ":ref:`ref_flyteidl.core.ComparisonExpression`", "", "" - - - - - - - -.. _ref_flyteidl.core.ComparisonExpression: - -ComparisonExpression ------------------------------------------------------------------- - -Defines a 2-level tree where the root is a comparison operator and Operands are primitives or known variables. -Each expression results in a boolean result. - - - -.. 
csv-table:: ComparisonExpression type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "operator", ":ref:`ref_flyteidl.core.ComparisonExpression.Operator`", "", "" - "left_value", ":ref:`ref_flyteidl.core.Operand`", "", "" - "right_value", ":ref:`ref_flyteidl.core.Operand`", "", "" - - - - - - - -.. _ref_flyteidl.core.ConjunctionExpression: - -ConjunctionExpression ------------------------------------------------------------------- - -Defines a conjunction expression of two boolean expressions. - - - -.. csv-table:: ConjunctionExpression type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "operator", ":ref:`ref_flyteidl.core.ConjunctionExpression.LogicalOperator`", "", "" - "left_expression", ":ref:`ref_flyteidl.core.BooleanExpression`", "", "" - "right_expression", ":ref:`ref_flyteidl.core.BooleanExpression`", "", "" - - - - - - - -.. _ref_flyteidl.core.Operand: - -Operand ------------------------------------------------------------------- - -Defines an operand to a comparison expression. - - - -.. csv-table:: Operand type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "primitive", ":ref:`ref_flyteidl.core.Primitive`", "", "Can be a constant" - "var", ":ref:`ref_string`", "", "Or one of this node's input variables" - - - - - - - - - -.. _ref_flyteidl.core.ComparisonExpression.Operator: - -ComparisonExpression.Operator ------------------------------------------------------------------- - -Binary Operator for each expression - -.. csv-table:: Enum ComparisonExpression.Operator values - :header: "Name", "Number", "Description" - :widths: auto - - "EQ", "0", "" - "NEQ", "1", "" - "GT", "2", "Greater Than" - "GTE", "3", "" - "LT", "4", "Less Than" - "LTE", "5", "" - - - -.. _ref_flyteidl.core.ConjunctionExpression.LogicalOperator: - -ConjunctionExpression.LogicalOperator ------------------------------------------------------------------- - -Nested conditions. 
They can be conjoined using AND / OR -Order of evaluation is not important as the operators are Commutative - -.. csv-table:: Enum ConjunctionExpression.LogicalOperator values - :header: "Name", "Number", "Description" - :widths: auto - - "AND", "0", "Conjunction" - "OR", "1", "" - - - - - - - - - - -.. _ref_flyteidl/core/dynamic_job.proto: - -flyteidl/core/dynamic_job.proto -================================================================== - - - - - -.. _ref_flyteidl.core.DynamicJobSpec: - -DynamicJobSpec ------------------------------------------------------------------- - -Describes a set of tasks to execute and how the final outputs are produced. - - - -.. csv-table:: DynamicJobSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "nodes", ":ref:`ref_flyteidl.core.Node`", "repeated", "A collection of nodes to execute." - "min_successes", ":ref:`ref_int64`", "", "An absolute number of successful completions of nodes required to mark this job as succeeded. As soon as this criteria is met, the dynamic job will be marked as successful and outputs will be computed. If this number becomes impossible to reach (e.g. number of currently running tasks + number of already succeeded tasks < min_successes) the task will be aborted immediately and marked as failed. The default value of this field, if not specified, is the count of nodes repeated field." - "outputs", ":ref:`ref_flyteidl.core.Binding`", "repeated", "Describes how to bind the final output of the dynamic job from the outputs of executed nodes. The referenced ids in bindings should have the generated id for the subtask." - "tasks", ":ref:`ref_flyteidl.core.TaskTemplate`", "repeated", "[Optional] A complete list of task specs referenced in nodes." - "subworkflows", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "repeated", "[Optional] A complete list of task specs referenced in nodes." - - - - - - - - - - - - - - - - -.. 
_ref_flyteidl/core/errors.proto: - -flyteidl/core/errors.proto -================================================================== - - - - - -.. _ref_flyteidl.core.ContainerError: - -ContainerError ------------------------------------------------------------------- - -Error message to propagate detailed errors from container executions to the execution -engine. - - - -.. csv-table:: ContainerError type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "code", ":ref:`ref_string`", "", "A simplified code for errors, so that we can provide a glossary of all possible errors." - "message", ":ref:`ref_string`", "", "A detailed error message." - "kind", ":ref:`ref_flyteidl.core.ContainerError.Kind`", "", "An abstract error kind for this error. Defaults to Non_Recoverable if not specified." - "origin", ":ref:`ref_flyteidl.core.ExecutionError.ErrorKind`", "", "Defines the origin of the error (system, user, unknown)." - - - - - - - -.. _ref_flyteidl.core.ErrorDocument: - -ErrorDocument ------------------------------------------------------------------- - -Defines the errors.pb file format the container can produce to communicate -failure reasons to the execution engine. - - - -.. csv-table:: ErrorDocument type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "error", ":ref:`ref_flyteidl.core.ContainerError`", "", "The error raised during execution." - - - - - - - - - -.. _ref_flyteidl.core.ContainerError.Kind: - -ContainerError.Kind ------------------------------------------------------------------- - -Defines a generic error type that dictates the behavior of the retry strategy. - -.. csv-table:: Enum ContainerError.Kind values - :header: "Name", "Number", "Description" - :widths: auto - - "NON_RECOVERABLE", "0", "" - "RECOVERABLE", "1", "" - - - - - - - - - - -.. _ref_flyteidl/core/execution.proto: - -flyteidl/core/execution.proto -================================================================== - - - - - -.. 
_ref_flyteidl.core.ExecutionError: - -ExecutionError ------------------------------------------------------------------- - -Represents the error message from the execution. - - - -.. csv-table:: ExecutionError type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "code", ":ref:`ref_string`", "", "Error code indicates a grouping of a type of error. More Info: <Link>" - "message", ":ref:`ref_string`", "", "Detailed description of the error - including stack trace." - "error_uri", ":ref:`ref_string`", "", "Full error contents accessible via a URI" - "kind", ":ref:`ref_flyteidl.core.ExecutionError.ErrorKind`", "", "" - - - - - - - -.. _ref_flyteidl.core.NodeExecution: - -NodeExecution ------------------------------------------------------------------- - -Indicates various phases of Node Execution - - - - - - - - -.. _ref_flyteidl.core.QualityOfService: - -QualityOfService ------------------------------------------------------------------- - -Indicates the priority of an execution. - - - -.. csv-table:: QualityOfService type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tier", ":ref:`ref_flyteidl.core.QualityOfService.Tier`", "", "" - "spec", ":ref:`ref_flyteidl.core.QualityOfServiceSpec`", "", "" - - - - - - - -.. _ref_flyteidl.core.QualityOfServiceSpec: - -QualityOfServiceSpec ------------------------------------------------------------------- - -Represents customized execution run-time attributes. - - - -.. csv-table:: QualityOfServiceSpec type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "queueing_budget", ":ref:`ref_google.protobuf.Duration`", "", "Indicates how much queueing delay an execution can tolerate." - - - - - - - -.. _ref_flyteidl.core.TaskExecution: - -TaskExecution ------------------------------------------------------------------- - -Phases that task plugins can go through. 
Not all phases may be applicable to a specific plugin task, -but this is the cumulative list that customers may want to know about for their task. - - - - - - - - -.. _ref_flyteidl.core.TaskLog: - -TaskLog ------------------------------------------------------------------- - -Log information for the task that is specific to a log sink -When our log story is flushed out, we may have more metadata here like log link expiry - - - -.. csv-table:: TaskLog type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "uri", ":ref:`ref_string`", "", "" - "name", ":ref:`ref_string`", "", "" - "message_format", ":ref:`ref_flyteidl.core.TaskLog.MessageFormat`", "", "" - "ttl", ":ref:`ref_google.protobuf.Duration`", "", "" - - - - - - - -.. _ref_flyteidl.core.WorkflowExecution: - -WorkflowExecution ------------------------------------------------------------------- - -Indicates various phases of Workflow Execution - - - - - - - - - - -.. _ref_flyteidl.core.ExecutionError.ErrorKind: - -ExecutionError.ErrorKind ------------------------------------------------------------------- - -Error type: System or User - -.. csv-table:: Enum ExecutionError.ErrorKind values - :header: "Name", "Number", "Description" - :widths: auto - - "UNKNOWN", "0", "" - "USER", "1", "" - "SYSTEM", "2", "" - - - -.. _ref_flyteidl.core.NodeExecution.Phase: - -NodeExecution.Phase ------------------------------------------------------------------- - - - -.. csv-table:: Enum NodeExecution.Phase values - :header: "Name", "Number", "Description" - :widths: auto - - "UNDEFINED", "0", "" - "QUEUED", "1", "" - "RUNNING", "2", "" - "SUCCEEDED", "3", "" - "FAILING", "4", "" - "FAILED", "5", "" - "ABORTED", "6", "" - "SKIPPED", "7", "" - "TIMED_OUT", "8", "" - "DYNAMIC_RUNNING", "9", "" - "RECOVERED", "10", "" - - - -.. _ref_flyteidl.core.QualityOfService.Tier: - -QualityOfService.Tier ------------------------------------------------------------------- - - - -.. 
csv-table:: Enum QualityOfService.Tier values - :header: "Name", "Number", "Description" - :widths: auto - - "UNDEFINED", "0", "Default: no quality of service specified." - "HIGH", "1", "" - "MEDIUM", "2", "" - "LOW", "3", "" - - - -.. _ref_flyteidl.core.TaskExecution.Phase: - -TaskExecution.Phase ------------------------------------------------------------------- - - - -.. csv-table:: Enum TaskExecution.Phase values - :header: "Name", "Number", "Description" - :widths: auto - - "UNDEFINED", "0", "" - "QUEUED", "1", "" - "RUNNING", "2", "" - "SUCCEEDED", "3", "" - "ABORTED", "4", "" - "FAILED", "5", "" - "INITIALIZING", "6", "To indicate cases where task is initializing, like: ErrImagePull, ContainerCreating, PodInitializing" - "WAITING_FOR_RESOURCES", "7", "To address cases, where underlying resource is not available: Backoff error, Resource quota exceeded" - - - -.. _ref_flyteidl.core.TaskLog.MessageFormat: - -TaskLog.MessageFormat ------------------------------------------------------------------- - - - -.. csv-table:: Enum TaskLog.MessageFormat values - :header: "Name", "Number", "Description" - :widths: auto - - "UNKNOWN", "0", "" - "CSV", "1", "" - "JSON", "2", "" - - - -.. _ref_flyteidl.core.WorkflowExecution.Phase: - -WorkflowExecution.Phase ------------------------------------------------------------------- - - - -.. csv-table:: Enum WorkflowExecution.Phase values - :header: "Name", "Number", "Description" - :widths: auto - - "UNDEFINED", "0", "" - "QUEUED", "1", "" - "RUNNING", "2", "" - "SUCCEEDING", "3", "" - "SUCCEEDED", "4", "" - "FAILING", "5", "" - "FAILED", "6", "" - "ABORTED", "7", "" - "TIMED_OUT", "8", "" - "ABORTING", "9", "" - - - - - - - - - - -.. _ref_flyteidl/core/identifier.proto: - -flyteidl/core/identifier.proto -================================================================== - - - - - -.. 
_ref_flyteidl.core.Identifier: - -Identifier ------------------------------------------------------------------- - -Encapsulation of fields that uniquely identifies a Flyte resource. - - - -.. csv-table:: Identifier type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resource_type", ":ref:`ref_flyteidl.core.ResourceType`", "", "Identifies the specific type of resource that this identifier corresponds to." - "project", ":ref:`ref_string`", "", "Name of the project the resource belongs to." - "domain", ":ref:`ref_string`", "", "Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project." - "name", ":ref:`ref_string`", "", "User provided value for the resource." - "version", ":ref:`ref_string`", "", "Specific version of the resource." - - - - - - - -.. _ref_flyteidl.core.NodeExecutionIdentifier: - -NodeExecutionIdentifier ------------------------------------------------------------------- - -Encapsulation of fields that identify a Flyte node execution entity. - - - -.. csv-table:: NodeExecutionIdentifier type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "node_id", ":ref:`ref_string`", "", "" - "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "" - - - - - - - -.. _ref_flyteidl.core.TaskExecutionIdentifier: - -TaskExecutionIdentifier ------------------------------------------------------------------- - -Encapsulation of fields that identify a Flyte task execution entity. - - - -.. csv-table:: TaskExecutionIdentifier type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "task_id", ":ref:`ref_flyteidl.core.Identifier`", "", "" - "node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "" - "retry_attempt", ":ref:`ref_uint32`", "", "" - - - - - - - -.. 
_ref_flyteidl.core.WorkflowExecutionIdentifier: - -WorkflowExecutionIdentifier ------------------------------------------------------------------- - -Encapsulation of fields that uniquely identifies a Flyte workflow execution - - - -.. csv-table:: WorkflowExecutionIdentifier type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "Name of the project the resource belongs to." - "domain", ":ref:`ref_string`", "", "Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project." - "name", ":ref:`ref_string`", "", "User or system provided value for the resource." - - - - - - - - - -.. _ref_flyteidl.core.ResourceType: - -ResourceType ------------------------------------------------------------------- - -Indicates a resource type within Flyte. - -.. csv-table:: Enum ResourceType values - :header: "Name", "Number", "Description" - :widths: auto - - "UNSPECIFIED", "0", "" - "TASK", "1", "" - "WORKFLOW", "2", "" - "LAUNCH_PLAN", "3", "" - "DATASET", "4", "A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects" - - - - - - - - - - -.. _ref_flyteidl/core/interface.proto: - -flyteidl/core/interface.proto -================================================================== - - - - - -.. _ref_flyteidl.core.Parameter: - -Parameter ------------------------------------------------------------------- - -A parameter is used as input to a launch plan and has -the special ability to have a default value or mark itself as required. - - - -.. 
csv-table:: Parameter type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "var", ":ref:`ref_flyteidl.core.Variable`", "", "+required Variable. Defines the type of the variable backing this parameter." - "default", ":ref:`ref_flyteidl.core.Literal`", "", "Defines a default value that has to match the variable type defined." - "required", ":ref:`ref_bool`", "", "+optional, is this value required to be filled." - - - - - - - -.. _ref_flyteidl.core.ParameterMap: - -ParameterMap ------------------------------------------------------------------- - -A map of Parameters. - - - -.. csv-table:: ParameterMap type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "parameters", ":ref:`ref_flyteidl.core.ParameterMap.ParametersEntry`", "repeated", "Defines a map of parameter names to parameters." - - - - - - - -.. _ref_flyteidl.core.ParameterMap.ParametersEntry: - -ParameterMap.ParametersEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: ParameterMap.ParametersEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.Parameter`", "", "" - - - - - - - -.. _ref_flyteidl.core.TypedInterface: - -TypedInterface ------------------------------------------------------------------- - -Defines strongly typed inputs and outputs. - - - -.. csv-table:: TypedInterface type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "inputs", ":ref:`ref_flyteidl.core.VariableMap`", "", "" - "outputs", ":ref:`ref_flyteidl.core.VariableMap`", "", "" - - - - - - - -.. _ref_flyteidl.core.Variable: - -Variable ------------------------------------------------------------------- - -Defines a strongly typed variable. - - - -.. 
csv-table:: Variable type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "type", ":ref:`ref_flyteidl.core.LiteralType`", "", "Variable literal type." - "description", ":ref:`ref_string`", "", "+optional string describing input variable" - - - - - - - -.. _ref_flyteidl.core.VariableMap: - -VariableMap ------------------------------------------------------------------- - -A map of Variables - - - -.. csv-table:: VariableMap type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "variables", ":ref:`ref_flyteidl.core.VariableMap.VariablesEntry`", "repeated", "Defines a map of variable names to variables." - - - - - - - -.. _ref_flyteidl.core.VariableMap.VariablesEntry: - -VariableMap.VariablesEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: VariableMap.VariablesEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.Variable`", "", "" - - - - - - - - - - - - - - - - -.. _ref_flyteidl/core/literals.proto: - -flyteidl/core/literals.proto -================================================================== - - - - - -.. _ref_flyteidl.core.Binary: - -Binary ------------------------------------------------------------------- - -A simple byte array with a tag to help different parts of the system communicate about what is in the byte array. -It's strongly advisable that consumers of this type define a unique tag and validate the tag before parsing the data. - - - -.. csv-table:: Binary type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_bytes`", "", "" - "tag", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.core.Binding: - -Binding ------------------------------------------------------------------- - -An input/output binding of a variable to either static value or a node output. - - - -.. 
csv-table:: Binding type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "var", ":ref:`ref_string`", "", "Variable name must match an input/output variable of the node." - "binding", ":ref:`ref_flyteidl.core.BindingData`", "", "Data to use to bind this variable." - - - - - - - -.. _ref_flyteidl.core.BindingData: - -BindingData ------------------------------------------------------------------- - -Specifies either a simple value or a reference to another output. - - - -.. csv-table:: BindingData type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "scalar", ":ref:`ref_flyteidl.core.Scalar`", "", "A simple scalar value." - "collection", ":ref:`ref_flyteidl.core.BindingDataCollection`", "", "A collection of binding data. This allows nesting of binding data to any number of levels." - "promise", ":ref:`ref_flyteidl.core.OutputReference`", "", "References an output promised by another node." - "map", ":ref:`ref_flyteidl.core.BindingDataMap`", "", "A map of bindings. The key is always a string." - "union", ":ref:`ref_flyteidl.core.UnionInfo`", "", "" - - - - - - - -.. _ref_flyteidl.core.BindingDataCollection: - -BindingDataCollection ------------------------------------------------------------------- - -A collection of BindingData items. - - - -.. csv-table:: BindingDataCollection type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "bindings", ":ref:`ref_flyteidl.core.BindingData`", "repeated", "" - - - - - - - -.. _ref_flyteidl.core.BindingDataMap: - -BindingDataMap ------------------------------------------------------------------- - -A map of BindingData items. - - - -.. csv-table:: BindingDataMap type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "bindings", ":ref:`ref_flyteidl.core.BindingDataMap.BindingsEntry`", "repeated", "" - - - - - - - -.. 
_ref_flyteidl.core.BindingDataMap.BindingsEntry: - -BindingDataMap.BindingsEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: BindingDataMap.BindingsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.BindingData`", "", "" - - - - - - - -.. _ref_flyteidl.core.Blob: - -Blob ------------------------------------------------------------------- - -Refers to an offloaded set of files. It encapsulates the type of the store and a unique uri for where the data is. -There are no restrictions on how the uri is formatted since it will depend on how to interact with the store. - - - -.. csv-table:: Blob type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "metadata", ":ref:`ref_flyteidl.core.BlobMetadata`", "", "" - "uri", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.core.BlobMetadata: - -BlobMetadata ------------------------------------------------------------------- - - - - - -.. csv-table:: BlobMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "type", ":ref:`ref_flyteidl.core.BlobType`", "", "" - - - - - - - -.. _ref_flyteidl.core.KeyValuePair: - -KeyValuePair ------------------------------------------------------------------- - -A generic key value pair. - - - -.. csv-table:: KeyValuePair type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "required." - "value", ":ref:`ref_string`", "", "+optional." - - - - - - - -.. _ref_flyteidl.core.Literal: - -Literal ------------------------------------------------------------------- - -A simple value. This supports any level of nesting (e.g. array of array of array of Blobs) as well as simple primitives. - - - -.. 
csv-table:: Literal type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "scalar", ":ref:`ref_flyteidl.core.Scalar`", "", "A simple value." - "collection", ":ref:`ref_flyteidl.core.LiteralCollection`", "", "A collection of literals to allow nesting." - "map", ":ref:`ref_flyteidl.core.LiteralMap`", "", "A map of strings to literals." - "hash", ":ref:`ref_string`", "", "A hash representing this literal. This is used for caching purposes. For more details refer to RFC 1893 (https://github.com/flyteorg/flyte/blob/master/rfc/system/1893-caching-of-offloaded-objects.md)" - - - - - - - -.. _ref_flyteidl.core.LiteralCollection: - -LiteralCollection ------------------------------------------------------------------- - -A collection of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field. - - - -.. csv-table:: LiteralCollection type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "literals", ":ref:`ref_flyteidl.core.Literal`", "repeated", "" - - - - - - - -.. _ref_flyteidl.core.LiteralMap: - -LiteralMap ------------------------------------------------------------------- - -A map of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field. - - - -.. csv-table:: LiteralMap type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "literals", ":ref:`ref_flyteidl.core.LiteralMap.LiteralsEntry`", "repeated", "" - - - - - - - -.. _ref_flyteidl.core.LiteralMap.LiteralsEntry: - -LiteralMap.LiteralsEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: LiteralMap.LiteralsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.Literal`", "", "" - - - - - - - -.. 
_ref_flyteidl.core.Primitive: - -Primitive ------------------------------------------------------------------- - -Primitive Types - - - -.. csv-table:: Primitive type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "integer", ":ref:`ref_int64`", "", "" - "float_value", ":ref:`ref_double`", "", "" - "string_value", ":ref:`ref_string`", "", "" - "boolean", ":ref:`ref_bool`", "", "" - "datetime", ":ref:`ref_google.protobuf.Timestamp`", "", "" - "duration", ":ref:`ref_google.protobuf.Duration`", "", "" - - - - - - - -.. _ref_flyteidl.core.RetryStrategy: - -RetryStrategy ------------------------------------------------------------------- - -Retry strategy associated with an executable unit. - - - -.. csv-table:: RetryStrategy type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "retries", ":ref:`ref_uint32`", "", "Number of retries. Retries will be consumed when the job fails with a recoverable error. The number of retries must be less than or equals to 10." - - - - - - - -.. _ref_flyteidl.core.Scalar: - -Scalar ------------------------------------------------------------------- - - - - - -.. csv-table:: Scalar type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "primitive", ":ref:`ref_flyteidl.core.Primitive`", "", "" - "blob", ":ref:`ref_flyteidl.core.Blob`", "", "" - "binary", ":ref:`ref_flyteidl.core.Binary`", "", "" - "schema", ":ref:`ref_flyteidl.core.Schema`", "", "" - "none_type", ":ref:`ref_flyteidl.core.Void`", "", "" - "error", ":ref:`ref_flyteidl.core.Error`", "", "" - "generic", ":ref:`ref_google.protobuf.Struct`", "", "" - "structured_dataset", ":ref:`ref_flyteidl.core.StructuredDataset`", "", "" - "union", ":ref:`ref_flyteidl.core.Union`", "", "" - - - - - - - -.. 
_ref_flyteidl.core.Schema: - -Schema ------------------------------------------------------------------- - -A strongly typed schema that defines the interface of data retrieved from the underlying storage medium. - - - -.. csv-table:: Schema type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "uri", ":ref:`ref_string`", "", "" - "type", ":ref:`ref_flyteidl.core.SchemaType`", "", "" - - - - - - - -.. _ref_flyteidl.core.StructuredDataset: - -StructuredDataset ------------------------------------------------------------------- - - - - - -.. csv-table:: StructuredDataset type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "uri", ":ref:`ref_string`", "", "String location uniquely identifying where the data is. Should start with the storage location (e.g. s3://, gs://, bq://, etc.)" - "metadata", ":ref:`ref_flyteidl.core.StructuredDatasetMetadata`", "", "" - - - - - - - -.. _ref_flyteidl.core.StructuredDatasetMetadata: - -StructuredDatasetMetadata ------------------------------------------------------------------- - - - - - -.. csv-table:: StructuredDatasetMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "structured_dataset_type", ":ref:`ref_flyteidl.core.StructuredDatasetType`", "", "Bundle the type information along with the literal. This is here because StructuredDatasets can often be more defined at run time than at compile time. That is, at compile time you might only declare a task to return a pandas dataframe or a StructuredDataset, without any column information, but at run time, you might have that column information. flytekit python will copy this type information into the literal, from the type information, if not provided by the various plugins (encoders). Since this field is run time generated, it's not used for any type checking." - - - - - - - -.. 
_ref_flyteidl.core.Union: - -Union ------------------------------------------------------------------- - -The runtime representation of a tagged union value. See `UnionType` for more details. - - - -.. csv-table:: Union type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "value", ":ref:`ref_flyteidl.core.Literal`", "", "" - "type", ":ref:`ref_flyteidl.core.LiteralType`", "", "" - - - - - - - -.. _ref_flyteidl.core.UnionInfo: - -UnionInfo ------------------------------------------------------------------- - - - - - -.. csv-table:: UnionInfo type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "targetType", ":ref:`ref_flyteidl.core.LiteralType`", "", "" - - - - - - - -.. _ref_flyteidl.core.Void: - -Void ------------------------------------------------------------------- - -Used to denote a nil/null/None assignment to a scalar value. The underlying LiteralType for Void is intentionally -undefined since it can be assigned to a scalar of any LiteralType. - - - - - - - - - - - - - - - - - -.. _ref_flyteidl/core/security.proto: - -flyteidl/core/security.proto -================================================================== - - - - - -.. _ref_flyteidl.core.Identity: - -Identity ------------------------------------------------------------------- - -Identity encapsulates the various security identities a task can run as. It's up to the underlying plugin to pick the -right identity for the execution environment. - - - -.. csv-table:: Identity type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "iam_role", ":ref:`ref_string`", "", "iam_role references the fully qualified name of Identity & Access Management role to impersonate." - "k8s_service_account", ":ref:`ref_string`", "", "k8s_service_account references a kubernetes service account to impersonate." - "oauth2_client", ":ref:`ref_flyteidl.core.OAuth2Client`", "", "oauth2_client references an oauth2 client. 
Backend plugins can use this information to impersonate the client when making external calls." - - - - - - - -.. _ref_flyteidl.core.OAuth2Client: - -OAuth2Client ------------------------------------------------------------------- - -OAuth2Client encapsulates OAuth2 Client Credentials to be used when making calls on behalf of that task. - - - -.. csv-table:: OAuth2Client type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "client_id", ":ref:`ref_string`", "", "client_id is the public id for the client to use. The system will not perform any pre-auth validation that the secret requested matches the client_id indicated here. +required" - "client_secret", ":ref:`ref_flyteidl.core.Secret`", "", "client_secret is a reference to the secret used to authenticate the OAuth2 client. +required" - - - - - - - -.. _ref_flyteidl.core.OAuth2TokenRequest: - -OAuth2TokenRequest ------------------------------------------------------------------- - -OAuth2TokenRequest encapsulates information needed to request an OAuth2 token. -FLYTE_TOKENS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if -tokens are passed through environment variables. -FLYTE_TOKENS_PATH_PREFIX will be passed to indicate the prefix of the path where secrets will be mounted if tokens -are passed through file mounts. - - - -.. csv-table:: OAuth2TokenRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_string`", "", "name indicates a unique id for the token request within this task token requests. It'll be used as a suffix for environment variables and as a filename for mounting tokens as files. +required" - "type", ":ref:`ref_flyteidl.core.OAuth2TokenRequest.Type`", "", "type indicates the type of the request to make. Defaults to CLIENT_CREDENTIALS. 
+required" - "client", ":ref:`ref_flyteidl.core.OAuth2Client`", "", "client references the client_id/secret to use to request the OAuth2 token. +required" - "idp_discovery_endpoint", ":ref:`ref_string`", "", "idp_discovery_endpoint references the discovery endpoint used to retrieve token endpoint and other related information. +optional" - "token_endpoint", ":ref:`ref_string`", "", "token_endpoint references the token issuance endpoint. If idp_discovery_endpoint is not provided, this parameter is mandatory. +optional" - - - - - - - -.. _ref_flyteidl.core.Secret: - -Secret ------------------------------------------------------------------- - -Secret encapsulates information about the secret a task needs to proceed. An environment variable -FLYTE_SECRETS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if -secrets are passed through environment variables. -FLYTE_SECRETS_DEFAULT_DIR will be passed to indicate the prefix of the path where secrets will be mounted if secrets -are passed through file mounts. - - - -.. csv-table:: Secret type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "group", ":ref:`ref_string`", "", "The name of the secret group where to find the key referenced below. For K8s secrets, this should be the name of the v1/secret object. For Confidant, this should be the Credential name. For Vault, this should be the secret name. For AWS Secret Manager, this should be the name of the secret. +required" - "group_version", ":ref:`ref_string`", "", "The group version to fetch. This is not supported in all secret management systems. It'll be ignored for the ones that do not support it. +optional" - "key", ":ref:`ref_string`", "", "The name of the secret to mount. This has to match an existing secret in the system. It's up to the implementation of the secret management system to require case sensitivity. 
For K8s secrets, Confidant and Vault, this should match one of the keys inside the secret. For AWS Secret Manager, it's ignored. +optional" - "mount_requirement", ":ref:`ref_flyteidl.core.Secret.MountType`", "", "mount_requirement is optional. Indicates where the secret has to be mounted. If provided, the execution will fail if the underlying key management system cannot satisfy that requirement. If not provided, the default location will depend on the key management system. +optional" - - - - - - - -.. _ref_flyteidl.core.SecurityContext: - -SecurityContext ------------------------------------------------------------------- - -SecurityContext holds security attributes that apply to tasks. - - - -.. csv-table:: SecurityContext type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "run_as", ":ref:`ref_flyteidl.core.Identity`", "", "run_as encapsulates the identity a pod should run as. If the task fills in multiple fields here, it'll be up to the backend plugin to choose the appropriate identity for the execution engine the task will run on." - "secrets", ":ref:`ref_flyteidl.core.Secret`", "repeated", "secrets indicate the list of secrets the task needs in order to proceed. Secrets will be mounted/passed to the pod as it starts. If the plugin responsible for kicking of the task will not run it on a flyte cluster (e.g. AWS Batch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access to the secret) and to pass it to the remote execution engine." - "tokens", ":ref:`ref_flyteidl.core.OAuth2TokenRequest`", "repeated", "tokens indicate the list of token requests the task needs in order to proceed. Tokens will be mounted/passed to the pod as it starts. If the plugin responsible for kicking of the task will not run it on a flyte cluster (e.g. 
AWS Batch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access to the secret) and to pass it to the remote execution engine." - - - - - - - - - -.. _ref_flyteidl.core.OAuth2TokenRequest.Type: - -OAuth2TokenRequest.Type ------------------------------------------------------------------- - -Type of the token requested. - -.. csv-table:: Enum OAuth2TokenRequest.Type values - :header: "Name", "Number", "Description" - :widths: auto - - "CLIENT_CREDENTIALS", "0", "CLIENT_CREDENTIALS indicates a 2-legged OAuth token requested using client credentials." - - - -.. _ref_flyteidl.core.Secret.MountType: - -Secret.MountType ------------------------------------------------------------------- - - - -.. csv-table:: Enum Secret.MountType values - :header: "Name", "Number", "Description" - :widths: auto - - "ANY", "0", "Default case, indicates the client can tolerate either mounting options." - "ENV_VAR", "1", "ENV_VAR indicates the secret needs to be mounted as an environment variable." - "FILE", "2", "FILE indicates the secret needs to be mounted as a file." - - - - - - - - - - -.. _ref_flyteidl/core/tasks.proto: - -flyteidl/core/tasks.proto -================================================================== - - - - - -.. _ref_flyteidl.core.Container: - -Container ------------------------------------------------------------------- - - - - - -.. csv-table:: Container type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "image", ":ref:`ref_string`", "", "Container image url. Eg: docker/redis:latest" - "command", ":ref:`ref_string`", "repeated", "Command to be executed, if not provided, the default entrypoint in the container image will be used." - "args", ":ref:`ref_string`", "repeated", "These will default to Flyte given paths. If provided, the system will not append known paths. 
If the task still needs flyte's inputs and outputs path, add $(FLYTE_INPUT_FILE), $(FLYTE_OUTPUT_FILE) wherever makes sense and the system will populate these before executing the container." - "resources", ":ref:`ref_flyteidl.core.Resources`", "", "Container resources requirement as specified by the container engine." - "env", ":ref:`ref_flyteidl.core.KeyValuePair`", "repeated", "Environment variables will be set as the container is starting up." - "config", ":ref:`ref_flyteidl.core.KeyValuePair`", "repeated", "**Deprecated.** Allows extra configs to be available for the container. TODO: elaborate on how configs will become available. Deprecated, please use TaskTemplate.config instead." - "ports", ":ref:`ref_flyteidl.core.ContainerPort`", "repeated", "Ports to open in the container. This feature is not supported by all execution engines. (e.g. supported on K8s but not supported on AWS Batch) Only K8s" - "data_config", ":ref:`ref_flyteidl.core.DataLoadingConfig`", "", "BETA: Optional configuration for DataLoading. If not specified, then default values are used. This makes it possible to to run a completely portable container, that uses inputs and outputs only from the local file-system and without having any reference to flyteidl. This is supported only on K8s at the moment. If data loading is enabled, then data will be mounted in accompanying directories specified in the DataLoadingConfig. If the directories are not specified, inputs will be mounted onto and outputs will be uploaded from a pre-determined file-system path. Refer to the documentation to understand the default paths. Only K8s" - "architecture", ":ref:`ref_flyteidl.core.Container.Architecture`", "", "" - - - - - - - -.. _ref_flyteidl.core.ContainerPort: - -ContainerPort ------------------------------------------------------------------- - -Defines port properties for a container. - - - -.. 
csv-table:: ContainerPort type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "container_port", ":ref:`ref_uint32`", "", "Number of port to expose on the pod's IP address. This must be a valid port number, 0 < x < 65536." - - - - - - - -.. _ref_flyteidl.core.DataLoadingConfig: - -DataLoadingConfig ------------------------------------------------------------------- - -This configuration allows executing raw containers in Flyte using the Flyte CoPilot system. -Flyte CoPilot, eliminates the needs of flytekit or sdk inside the container. Any inputs required by the users container are side-loaded in the input_path -Any outputs generated by the user container - within output_path are automatically uploaded. - - - -.. csv-table:: DataLoadingConfig type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "enabled", ":ref:`ref_bool`", "", "Flag enables DataLoading Config. If this is not set, data loading will not be used!" - "input_path", ":ref:`ref_string`", "", "File system path (start at root). This folder will contain all the inputs exploded to a separate file. Example, if the input interface needs (x: int, y: blob, z: multipart_blob) and the input path is "/var/flyte/inputs", then the file system will look like /var/flyte/inputs/inputs.<metadata format dependent -> .pb .json .yaml> -> Format as defined previously. The Blob and Multipart blob will reference local filesystem instead of remote locations /var/flyte/inputs/x -> X is a file that contains the value of x (integer) in string format /var/flyte/inputs/y -> Y is a file in Binary format /var/flyte/inputs/z/... -> Note Z itself is a directory More information about the protocol - refer to docs #TODO reference docs here" - "output_path", ":ref:`ref_string`", "", "File system path (start at root). 
This folder should contain all the outputs for the task as individual files and/or an error text file" - "format", ":ref:`ref_flyteidl.core.DataLoadingConfig.LiteralMapFormat`", "", "In the inputs folder, there will be an additional summary/metadata file that contains references to all files or inlined primitive values. This format decides the actual encoding for the data. Refer to the encoding to understand the specifics of the contents and the encoding" - "io_strategy", ":ref:`ref_flyteidl.core.IOStrategy`", "", "" - - - - - - - -.. _ref_flyteidl.core.IOStrategy: - -IOStrategy ------------------------------------------------------------------- - -Strategy to use when dealing with Blob, Schema, or multipart blob data (large datasets) - - - -.. csv-table:: IOStrategy type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "download_mode", ":ref:`ref_flyteidl.core.IOStrategy.DownloadMode`", "", "Mode to use to manage downloads" - "upload_mode", ":ref:`ref_flyteidl.core.IOStrategy.UploadMode`", "", "Mode to use to manage uploads" - - - - - - - -.. _ref_flyteidl.core.K8sObjectMetadata: - -K8sObjectMetadata ------------------------------------------------------------------- - -Metadata for building a kubernetes object when a task is executed. - - - -.. csv-table:: K8sObjectMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "labels", ":ref:`ref_flyteidl.core.K8sObjectMetadata.LabelsEntry`", "repeated", "Optional labels to add to the pod definition." - "annotations", ":ref:`ref_flyteidl.core.K8sObjectMetadata.AnnotationsEntry`", "repeated", "Optional annotations to add to the pod definition." - - - - - - - -.. _ref_flyteidl.core.K8sObjectMetadata.AnnotationsEntry: - -K8sObjectMetadata.AnnotationsEntry ------------------------------------------------------------------- - - - - - -.. 
csv-table:: K8sObjectMetadata.AnnotationsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.core.K8sObjectMetadata.LabelsEntry: - -K8sObjectMetadata.LabelsEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: K8sObjectMetadata.LabelsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_flyteidl.core.K8sPod: - -K8sPod ------------------------------------------------------------------- - -Defines a pod spec and additional pod metadata that is created when a task is executed. - - - -.. csv-table:: K8sPod type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "metadata", ":ref:`ref_flyteidl.core.K8sObjectMetadata`", "", "Contains additional metadata for building a kubernetes pod." - "pod_spec", ":ref:`ref_google.protobuf.Struct`", "", "Defines the primary pod spec created when a task is executed. This should be a JSON-marshalled pod spec, which can be defined in - go, using: https://github.com/kubernetes/api/blob/release-1.21/core/v1/types.go#L2936 - python: using https://github.com/kubernetes-client/python/blob/release-19.0/kubernetes/client/models/v1_pod_spec.py" - - - - - - - -.. _ref_flyteidl.core.Resources: - -Resources ------------------------------------------------------------------- - -A customizable interface to convey resources requested for a container. This can be interpreted differently for different -container engines. - - - -.. csv-table:: Resources type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "requests", ":ref:`ref_flyteidl.core.Resources.ResourceEntry`", "repeated", "The desired set of resources requested. ResourceNames must be unique within the list." 
- "limits", ":ref:`ref_flyteidl.core.Resources.ResourceEntry`", "repeated", "Defines a set of bounds (e.g. min/max) within which the task can reliably run. ResourceNames must be unique within the list." - - - - - - - -.. _ref_flyteidl.core.Resources.ResourceEntry: - -Resources.ResourceEntry ------------------------------------------------------------------- - -Encapsulates a resource name and value. - - - -.. csv-table:: Resources.ResourceEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_flyteidl.core.Resources.ResourceName`", "", "Resource name." - "value", ":ref:`ref_string`", "", "Value must be a valid k8s quantity. See https://github.com/kubernetes/apimachinery/blob/master/pkg/api/resource/quantity.go#L30-L80" - - - - - - - -.. _ref_flyteidl.core.RuntimeMetadata: - -RuntimeMetadata ------------------------------------------------------------------- - -Runtime information. This is loosely defined to allow for extensibility. - - - -.. csv-table:: RuntimeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "type", ":ref:`ref_flyteidl.core.RuntimeMetadata.RuntimeType`", "", "Type of runtime." - "version", ":ref:`ref_string`", "", "Version of the runtime. All versions should be backward compatible. However, certain cases call for version checks to ensure tighter validation or setting expectations." - "flavor", ":ref:`ref_string`", "", "+optional It can be used to provide extra information about the runtime (e.g. python, golang... etc.)." - - - - - - - -.. _ref_flyteidl.core.Sql: - -Sql ------------------------------------------------------------------- - -Sql represents a generic sql workload with a statement and dialect. - - - -.. csv-table:: Sql type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "statement", ":ref:`ref_string`", "", "The actual query to run, the query can have templated parameters. 
We use Flyte's Golang templating format for Query templating. Refer to the templating documentation. https://docs.flyte.org/projects/cookbook/en/latest/auto/integrations/external_services/hive/hive.html#sphx-glr-auto-integrations-external-services-hive-hive-py For example, insert overwrite directory '{{ .rawOutputDataPrefix }}' stored as parquet select * from my_table where ds = '{{ .Inputs.ds }}'" - "dialect", ":ref:`ref_flyteidl.core.Sql.Dialect`", "", "" - - - - - - - -.. _ref_flyteidl.core.TaskMetadata: - -TaskMetadata ------------------------------------------------------------------- - -Task Metadata - - - -.. csv-table:: TaskMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "discoverable", ":ref:`ref_bool`", "", "Indicates whether the system should attempt to lookup this task's output to avoid duplication of work." - "runtime", ":ref:`ref_flyteidl.core.RuntimeMetadata`", "", "Runtime information about the task." - "timeout", ":ref:`ref_google.protobuf.Duration`", "", "The overall timeout of a task including user-triggered retries." - "retries", ":ref:`ref_flyteidl.core.RetryStrategy`", "", "Number of retries per task." - "discovery_version", ":ref:`ref_string`", "", "Indicates a logical version to apply to this task for the purpose of discovery." - "deprecated_error_message", ":ref:`ref_string`", "", "If set, this indicates that this task is deprecated. This will enable owners of tasks to notify consumers of the ending of support for a given task." - "interruptible", ":ref:`ref_bool`", "", "" - "cache_serializable", ":ref:`ref_bool`", "", "Indicates whether the system should attempt to execute discoverable instances in serial to avoid duplicate work" - - - - - - - -.. _ref_flyteidl.core.TaskTemplate: - -TaskTemplate ------------------------------------------------------------------- - -A Task structure that uniquely identifies a task in the system -Tasks are registered as a first step in the system. - - - -.. 
csv-table:: TaskTemplate type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "Auto generated taskId by the system. Task Id uniquely identifies this task globally." - "type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier. This can be used to customize any of the components. If no extensions are provided in the system, Flyte will resolve the this task to its TaskCategory and default the implementation registered for the TaskCategory." - "metadata", ":ref:`ref_flyteidl.core.TaskMetadata`", "", "Extra metadata about the task." - "interface", ":ref:`ref_flyteidl.core.TypedInterface`", "", "A strongly typed interface for the task. This enables others to use this task within a workflow and guarantees compile-time validation of the workflow to avoid costly runtime failures." - "custom", ":ref:`ref_google.protobuf.Struct`", "", "Custom data about the task. This is extensible to allow various plugins in the system." - "container", ":ref:`ref_flyteidl.core.Container`", "", "" - "k8s_pod", ":ref:`ref_flyteidl.core.K8sPod`", "", "" - "sql", ":ref:`ref_flyteidl.core.Sql`", "", "" - "task_type_version", ":ref:`ref_int32`", "", "This can be used to customize task handling at execution time for the same task type." - "security_context", ":ref:`ref_flyteidl.core.SecurityContext`", "", "security_context encapsulates security attributes requested to run this task." - "config", ":ref:`ref_flyteidl.core.TaskTemplate.ConfigEntry`", "repeated", "Metadata about the custom defined for this task. This is extensible to allow various plugins in the system to use as required. reserve the field numbers 1 through 15 for very frequently occurring message elements" - - - - - - - -.. _ref_flyteidl.core.TaskTemplate.ConfigEntry: - -TaskTemplate.ConfigEntry ------------------------------------------------------------------- - - - - - -.. 
csv-table:: TaskTemplate.ConfigEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - - - -.. _ref_flyteidl.core.Container.Architecture: - -Container.Architecture ------------------------------------------------------------------- - -Architecture-type the container image supports. - -.. csv-table:: Enum Container.Architecture values - :header: "Name", "Number", "Description" - :widths: auto - - "UNKNOWN", "0", "" - "AMD64", "1", "" - "ARM64", "2", "" - "ARM_V6", "3", "" - "ARM_V7", "4", "" - - - -.. _ref_flyteidl.core.DataLoadingConfig.LiteralMapFormat: - -DataLoadingConfig.LiteralMapFormat ------------------------------------------------------------------- - -LiteralMapFormat decides the encoding format in which the input metadata should be made available to the containers. -If the user has access to the protocol buffer definitions, it is recommended to use the PROTO format. -JSON and YAML do not need any protobuf definitions to read it -All remote references in core.LiteralMap are replaced with local filesystem references (the data is downloaded to local filesystem) - -.. csv-table:: Enum DataLoadingConfig.LiteralMapFormat values - :header: "Name", "Number", "Description" - :widths: auto - - "JSON", "0", "JSON / YAML for the metadata (which contains inlined primitive values). The representation is inline with the standard json specification as specified - https://www.json.org/json-en.html" - "YAML", "1", "" - "PROTO", "2", "Proto is a serialized binary of `core.LiteralMap` defined in flyteidl/core" - - - -.. _ref_flyteidl.core.IOStrategy.DownloadMode: - -IOStrategy.DownloadMode ------------------------------------------------------------------- - -Mode to use for downloading - -.. 
csv-table:: Enum IOStrategy.DownloadMode values - :header: "Name", "Number", "Description" - :widths: auto - - "DOWNLOAD_EAGER", "0", "All data will be downloaded before the main container is executed" - "DOWNLOAD_STREAM", "1", "Data will be downloaded as a stream and an End-Of-Stream marker will be written to indicate all data has been downloaded. Refer to protocol for details" - "DO_NOT_DOWNLOAD", "2", "Large objects (offloaded) will not be downloaded" - - - -.. _ref_flyteidl.core.IOStrategy.UploadMode: - -IOStrategy.UploadMode ------------------------------------------------------------------- - -Mode to use for uploading - -.. csv-table:: Enum IOStrategy.UploadMode values - :header: "Name", "Number", "Description" - :widths: auto - - "UPLOAD_ON_EXIT", "0", "All data will be uploaded after the main container exits" - "UPLOAD_EAGER", "1", "Data will be uploaded as it appears. Refer to protocol specification for details" - "DO_NOT_UPLOAD", "2", "Data will not be uploaded, only references will be written" - - - -.. _ref_flyteidl.core.Resources.ResourceName: - -Resources.ResourceName ------------------------------------------------------------------- - -Known resource names. - -.. csv-table:: Enum Resources.ResourceName values - :header: "Name", "Number", "Description" - :widths: auto - - "UNKNOWN", "0", "" - "CPU", "1", "" - "GPU", "2", "" - "MEMORY", "3", "" - "STORAGE", "4", "" - "EPHEMERAL_STORAGE", "5", "For Kubernetes-based deployments, pods use ephemeral local storage for scratch space, caching, and for logs." - - - -.. _ref_flyteidl.core.RuntimeMetadata.RuntimeType: - -RuntimeMetadata.RuntimeType ------------------------------------------------------------------- - - - -.. csv-table:: Enum RuntimeMetadata.RuntimeType values - :header: "Name", "Number", "Description" - :widths: auto - - "OTHER", "0", "" - "FLYTE_SDK", "1", "" - - - -.. 
_ref_flyteidl.core.Sql.Dialect: - -Sql.Dialect ------------------------------------------------------------------- - -The dialect of the SQL statement. This is used to validate and parse SQL statements at compilation time to avoid -expensive runtime operations. If set to an unsupported dialect, no validation will be done on the statement. -We support the following dialect: ansi, hive. - -.. csv-table:: Enum Sql.Dialect values - :header: "Name", "Number", "Description" - :widths: auto - - "UNDEFINED", "0", "" - "ANSI", "1", "" - "HIVE", "2", "" - "OTHER", "3", "" - - - - - - - - - - -.. _ref_flyteidl/core/types.proto: - -flyteidl/core/types.proto -================================================================== - - - - - -.. _ref_flyteidl.core.BlobType: - -BlobType ------------------------------------------------------------------- - -Defines type behavior for blob objects - - - -.. csv-table:: BlobType type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "format", ":ref:`ref_string`", "", "Format can be a free form string understood by SDK/UI etc like csv, parquet etc" - "dimensionality", ":ref:`ref_flyteidl.core.BlobType.BlobDimensionality`", "", "" - - - - - - - -.. _ref_flyteidl.core.EnumType: - -EnumType ------------------------------------------------------------------- - -Enables declaring enum types, with predefined string values -For len(values) > 0, the first value in the ordered list is regarded as the default value. If you wish -To provide no defaults, make the first value as undefined. - - - -.. csv-table:: EnumType type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_string`", "repeated", "Predefined set of enum values." - - - - - - - -.. _ref_flyteidl.core.Error: - -Error ------------------------------------------------------------------- - -Represents an error thrown from a node. - - - -.. 
csv-table:: Error type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "failed_node_id", ":ref:`ref_string`", "", "The node id that threw the error." - "message", ":ref:`ref_string`", "", "Error message thrown." - - - - - - - -.. _ref_flyteidl.core.LiteralType: - -LiteralType ------------------------------------------------------------------- - -Defines a strong type to allow type checking between interfaces. - - - -.. csv-table:: LiteralType type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "simple", ":ref:`ref_flyteidl.core.SimpleType`", "", "A simple type that can be compared one-to-one with another." - "schema", ":ref:`ref_flyteidl.core.SchemaType`", "", "A complex type that requires matching of inner fields." - "collection_type", ":ref:`ref_flyteidl.core.LiteralType`", "", "Defines the type of the value of a collection. Only homogeneous collections are allowed." - "map_value_type", ":ref:`ref_flyteidl.core.LiteralType`", "", "Defines the type of the value of a map type. The type of the key is always a string." - "blob", ":ref:`ref_flyteidl.core.BlobType`", "", "A blob might have specialized implementation details depending on associated metadata." - "enum_type", ":ref:`ref_flyteidl.core.EnumType`", "", "Defines an enum with pre-defined string values." - "structured_dataset_type", ":ref:`ref_flyteidl.core.StructuredDatasetType`", "", "Generalized schema support" - "union_type", ":ref:`ref_flyteidl.core.UnionType`", "", "Defines an union type with pre-defined LiteralTypes." - "metadata", ":ref:`ref_google.protobuf.Struct`", "", "This field contains type metadata that is descriptive of the type, but is NOT considered in type-checking. This might be used by consumers to identify special behavior or display extended information for the type." 
- "annotation", ":ref:`ref_flyteidl.core.TypeAnnotation`", "", "This field contains arbitrary data that might have special semantic meaning for the client but does not effect internal flyte behavior." - "structure", ":ref:`ref_flyteidl.core.TypeStructure`", "", "Hints to improve type matching." - - - - - - - -.. _ref_flyteidl.core.OutputReference: - -OutputReference ------------------------------------------------------------------- - -A reference to an output produced by a node. The type can be retrieved -and validated- from -the underlying interface of the node. - - - -.. csv-table:: OutputReference type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "node_id", ":ref:`ref_string`", "", "Node id must exist at the graph layer." - "var", ":ref:`ref_string`", "", "Variable name must refer to an output variable for the node." - - - - - - - -.. _ref_flyteidl.core.SchemaType: - -SchemaType ------------------------------------------------------------------- - -Defines schema columns and types to strongly type-validate schemas interoperability. - - - -.. csv-table:: SchemaType type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "columns", ":ref:`ref_flyteidl.core.SchemaType.SchemaColumn`", "repeated", "A list of ordered columns this schema comprises of." - - - - - - - -.. _ref_flyteidl.core.SchemaType.SchemaColumn: - -SchemaType.SchemaColumn ------------------------------------------------------------------- - - - - - -.. csv-table:: SchemaType.SchemaColumn type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_string`", "", "A unique name -within the schema type- for the column" - "type", ":ref:`ref_flyteidl.core.SchemaType.SchemaColumn.SchemaColumnType`", "", "The column type. This allows a limited set of types currently." - - - - - - - -.. 
_ref_flyteidl.core.StructuredDatasetType: - -StructuredDatasetType ------------------------------------------------------------------- - - - - - -.. csv-table:: StructuredDatasetType type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "columns", ":ref:`ref_flyteidl.core.StructuredDatasetType.DatasetColumn`", "repeated", "A list of ordered columns this schema comprises of." - "format", ":ref:`ref_string`", "", "This is the storage format, the format of the bits at rest parquet, feather, csv, etc. For two types to be compatible, the format will need to be an exact match." - "external_schema_type", ":ref:`ref_string`", "", "This is a string representing the type that the bytes in external_schema_bytes are formatted in. This is an optional field that will not be used for type checking." - "external_schema_bytes", ":ref:`ref_bytes`", "", "The serialized bytes of a third-party schema library like Arrow. This is an optional field that will not be used for type checking." - - - - - - - -.. _ref_flyteidl.core.StructuredDatasetType.DatasetColumn: - -StructuredDatasetType.DatasetColumn ------------------------------------------------------------------- - - - - - -.. csv-table:: StructuredDatasetType.DatasetColumn type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_string`", "", "A unique name within the schema type for the column." - "literal_type", ":ref:`ref_flyteidl.core.LiteralType`", "", "The column type." - - - - - - - -.. _ref_flyteidl.core.TypeAnnotation: - -TypeAnnotation ------------------------------------------------------------------- - -TypeAnnotation encapsulates registration time information about a type. This can be used for various control-plane operations. TypeAnnotation will not be available at runtime when a task runs. - - - -.. 
csv-table:: TypeAnnotation type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "annotations", ":ref:`ref_google.protobuf.Struct`", "", "A arbitrary JSON payload to describe a type." - - - - - - - -.. _ref_flyteidl.core.TypeStructure: - -TypeStructure ------------------------------------------------------------------- - -Hints to improve type matching -e.g. allows distinguishing output from custom type transformers -even if the underlying IDL serialization matches. - - - -.. csv-table:: TypeStructure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tag", ":ref:`ref_string`", "", "Must exactly match for types to be castable" - - - - - - - -.. _ref_flyteidl.core.UnionType: - -UnionType ------------------------------------------------------------------- - -Defines a tagged union type, also known as a variant (and formally as the sum type). - -A sum type S is defined by a sequence of types (A, B, C, ...), each tagged by a string tag -A value of type S is constructed from a value of any of the variant types. The specific choice of type is recorded by -storing the varaint's tag with the literal value and can be examined in runtime. - -Type S is typically written as -S := Apple A | Banana B | Cantaloupe C | ... - -Notably, a nullable (optional) type is a sum type between some type X and the singleton type representing a null-value: -Optional X := X | Null - -See also: https://en.wikipedia.org/wiki/Tagged_union - - - -.. csv-table:: UnionType type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "variants", ":ref:`ref_flyteidl.core.LiteralType`", "repeated", "Predefined set of variants in union." - - - - - - - - - -.. _ref_flyteidl.core.BlobType.BlobDimensionality: - -BlobType.BlobDimensionality ------------------------------------------------------------------- - - - -.. 
csv-table:: Enum BlobType.BlobDimensionality values - :header: "Name", "Number", "Description" - :widths: auto - - "SINGLE", "0", "" - "MULTIPART", "1", "" - - - -.. _ref_flyteidl.core.SchemaType.SchemaColumn.SchemaColumnType: - -SchemaType.SchemaColumn.SchemaColumnType ------------------------------------------------------------------- - - - -.. csv-table:: Enum SchemaType.SchemaColumn.SchemaColumnType values - :header: "Name", "Number", "Description" - :widths: auto - - "INTEGER", "0", "" - "FLOAT", "1", "" - "STRING", "2", "" - "BOOLEAN", "3", "" - "DATETIME", "4", "" - "DURATION", "5", "" - - - -.. _ref_flyteidl.core.SimpleType: - -SimpleType ------------------------------------------------------------------- - -Define a set of simple types. - -.. csv-table:: Enum SimpleType values - :header: "Name", "Number", "Description" - :widths: auto - - "NONE", "0", "" - "INTEGER", "1", "" - "FLOAT", "2", "" - "STRING", "3", "" - "BOOLEAN", "4", "" - "DATETIME", "5", "" - "DURATION", "6", "" - "BINARY", "7", "" - "ERROR", "8", "" - "STRUCT", "9", "" - - - - - - - - - - -.. _ref_flyteidl/core/workflow.proto: - -flyteidl/core/workflow.proto -================================================================== - - - - - -.. _ref_flyteidl.core.Alias: - -Alias ------------------------------------------------------------------- - -Links a variable to an alias. - - - -.. csv-table:: Alias type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "var", ":ref:`ref_string`", "", "Must match one of the output variable names on a node." - "alias", ":ref:`ref_string`", "", "A workflow-level unique alias that downstream nodes can refer to in their input." - - - - - - - -.. _ref_flyteidl.core.BranchNode: - -BranchNode ------------------------------------------------------------------- - -BranchNode is a special node that alter the flow of the workflow graph. 
It allows the control flow to branch at -runtime based on a series of conditions that get evaluated on various parameters (e.g. inputs, primitives). - - - -.. csv-table:: BranchNode type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "if_else", ":ref:`ref_flyteidl.core.IfElseBlock`", "", "+required" - - - - - - - -.. _ref_flyteidl.core.IfBlock: - -IfBlock ------------------------------------------------------------------- - -Defines a condition and the execution unit that should be executed if the condition is satisfied. - - - -.. csv-table:: IfBlock type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "condition", ":ref:`ref_flyteidl.core.BooleanExpression`", "", "" - "then_node", ":ref:`ref_flyteidl.core.Node`", "", "" - - - - - - - -.. _ref_flyteidl.core.IfElseBlock: - -IfElseBlock ------------------------------------------------------------------- - -Defines a series of if/else blocks. The first branch whose condition evaluates to true is the one to execute. -If no conditions were satisfied, the else_node or the error will execute. - - - -.. csv-table:: IfElseBlock type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "case", ":ref:`ref_flyteidl.core.IfBlock`", "", "+required. First condition to evaluate." - "other", ":ref:`ref_flyteidl.core.IfBlock`", "repeated", "+optional. Additional branches to evaluate." - "else_node", ":ref:`ref_flyteidl.core.Node`", "", "The node to execute in case none of the branches were taken." - "error", ":ref:`ref_flyteidl.core.Error`", "", "An error to throw in case none of the branches were taken." - - - - - - - -.. _ref_flyteidl.core.Node: - -Node ------------------------------------------------------------------- - -A Workflow graph Node. One unit of execution in the graph. Each node can be linked to a Task, a Workflow or a branch -node. - - - -.. 
csv-table:: Node type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_string`", "", "A workflow-level unique identifier that identifies this node in the workflow. "inputs" and "outputs" are reserved node ids that cannot be used by other nodes." - "metadata", ":ref:`ref_flyteidl.core.NodeMetadata`", "", "Extra metadata about the node." - "inputs", ":ref:`ref_flyteidl.core.Binding`", "repeated", "Specifies how to bind the underlying interface's inputs. All required inputs specified in the underlying interface must be fulfilled." - "upstream_node_ids", ":ref:`ref_string`", "repeated", "+optional Specifies execution dependency for this node ensuring it will only get scheduled to run after all its upstream nodes have completed. This node will have an implicit dependency on any node that appears in inputs field." - "output_aliases", ":ref:`ref_flyteidl.core.Alias`", "repeated", "+optional. A node can define aliases for a subset of its outputs. This is particularly useful if different nodes need to conform to the same interface (e.g. all branches in a branch node). Downstream nodes must refer to this nodes outputs using the alias if one's specified." - "task_node", ":ref:`ref_flyteidl.core.TaskNode`", "", "Information about the Task to execute in this node." - "workflow_node", ":ref:`ref_flyteidl.core.WorkflowNode`", "", "Information about the Workflow to execute in this mode." - "branch_node", ":ref:`ref_flyteidl.core.BranchNode`", "", "Information about the branch node to evaluate in this node." - - - - - - - -.. _ref_flyteidl.core.NodeMetadata: - -NodeMetadata ------------------------------------------------------------------- - -Defines extra information about the Node. - - - -.. 
csv-table:: NodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_string`", "", "A friendly name for the Node" - "timeout", ":ref:`ref_google.protobuf.Duration`", "", "The overall timeout of a task." - "retries", ":ref:`ref_flyteidl.core.RetryStrategy`", "", "Number of retries per task." - "interruptible", ":ref:`ref_bool`", "", "" - - - - - - - -.. _ref_flyteidl.core.TaskNode: - -TaskNode ------------------------------------------------------------------- - -Refers to the task that the Node is to execute. - - - -.. csv-table:: TaskNode type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "reference_id", ":ref:`ref_flyteidl.core.Identifier`", "", "A globally unique identifier for the task." - "overrides", ":ref:`ref_flyteidl.core.TaskNodeOverrides`", "", "Optional overrides applied at task execution time." - - - - - - - -.. _ref_flyteidl.core.TaskNodeOverrides: - -TaskNodeOverrides ------------------------------------------------------------------- - -Optional task node overrides that will be applied at task execution time. - - - -.. csv-table:: TaskNodeOverrides type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "resources", ":ref:`ref_flyteidl.core.Resources`", "", "A customizable interface to convey resources requested for a task container." - - - - - - - -.. _ref_flyteidl.core.WorkflowMetadata: - -WorkflowMetadata ------------------------------------------------------------------- - -This is workflow layer metadata. These settings are only applicable to the workflow as a whole, and do not -percolate down to child entities (like tasks) launched by the workflow. - - - -.. csv-table:: WorkflowMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "quality_of_service", ":ref:`ref_flyteidl.core.QualityOfService`", "", "Indicates the runtime priority of workflow executions." 
- "on_failure", ":ref:`ref_flyteidl.core.WorkflowMetadata.OnFailurePolicy`", "", "Defines how the system should behave when a failure is detected in the workflow execution." - - - - - - - -.. _ref_flyteidl.core.WorkflowMetadataDefaults: - -WorkflowMetadataDefaults ------------------------------------------------------------------- - -The difference between these settings and the WorkflowMetadata ones is that these are meant to be passed down to -a workflow's underlying entities (like tasks). For instance, 'interruptible' has no meaning at the workflow layer, it -is only relevant when a task executes. The settings here are the defaults that are passed to all nodes -unless explicitly overridden at the node layer. -If you are adding a setting that applies to both the Workflow itself, and everything underneath it, it should be -added to both this object and the WorkflowMetadata object above. - - - -.. csv-table:: WorkflowMetadataDefaults type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "interruptible", ":ref:`ref_bool`", "", "Whether child nodes of the workflow are interruptible." - - - - - - - -.. _ref_flyteidl.core.WorkflowNode: - -WorkflowNode ------------------------------------------------------------------- - -Refers to a the workflow the node is to execute. - - - -.. csv-table:: WorkflowNode type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "launchplan_ref", ":ref:`ref_flyteidl.core.Identifier`", "", "A globally unique identifier for the launch plan." - "sub_workflow_ref", ":ref:`ref_flyteidl.core.Identifier`", "", "Reference to a subworkflow, that should be defined with the compiler context" - - - - - - - -.. _ref_flyteidl.core.WorkflowTemplate: - -WorkflowTemplate ------------------------------------------------------------------- - -Flyte Workflow Structure that encapsulates task, branch and subworkflow nodes to form a statically analyzable, -directed acyclic graph. - - - -.. 
csv-table:: WorkflowTemplate type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "A globally unique identifier for the workflow." - "metadata", ":ref:`ref_flyteidl.core.WorkflowMetadata`", "", "Extra metadata about the workflow." - "interface", ":ref:`ref_flyteidl.core.TypedInterface`", "", "Defines a strongly typed interface for the Workflow. This can include some optional parameters." - "nodes", ":ref:`ref_flyteidl.core.Node`", "repeated", "A list of nodes. In addition, "globals" is a special reserved node id that can be used to consume workflow inputs." - "outputs", ":ref:`ref_flyteidl.core.Binding`", "repeated", "A list of output bindings that specify how to construct workflow outputs. Bindings can pull node outputs or specify literals. All workflow outputs specified in the interface field must be bound in order for the workflow to be validated. A workflow has an implicit dependency on all of its nodes to execute successfully in order to bind final outputs. Most of these outputs will be Binding's with a BindingData of type OutputReference. That is, your workflow can just have an output of some constant (`Output(5)`), but usually, the workflow will be pulling outputs from the output of a task." - "failure_node", ":ref:`ref_flyteidl.core.Node`", "", "+optional A catch-all node. This node is executed whenever the execution engine determines the workflow has failed. The interface of this node must match the Workflow interface with an additional input named "error" of type pb.lyft.flyte.core.Error." - "metadata_defaults", ":ref:`ref_flyteidl.core.WorkflowMetadataDefaults`", "", "workflow defaults" - - - - - - - - - -.. _ref_flyteidl.core.WorkflowMetadata.OnFailurePolicy: - -WorkflowMetadata.OnFailurePolicy ------------------------------------------------------------------- - -Failure Handling Strategy - -.. 
csv-table:: Enum WorkflowMetadata.OnFailurePolicy values - :header: "Name", "Number", "Description" - :widths: auto - - "FAIL_IMMEDIATELY", "0", "FAIL_IMMEDIATELY instructs the system to fail as soon as a node fails in the workflow. It'll automatically abort all currently running nodes and clean up resources before finally marking the workflow executions as failed." - "FAIL_AFTER_EXECUTABLE_NODES_COMPLETE", "1", "FAIL_AFTER_EXECUTABLE_NODES_COMPLETE instructs the system to make as much progress as it can. The system will not alter the dependencies of the execution graph so any node that depend on the failed node will not be run. Other nodes that will be executed to completion before cleaning up resources and marking the workflow execution as failed." - - - - - - - - - - -.. _ref_flyteidl/core/workflow_closure.proto: - -flyteidl/core/workflow_closure.proto -================================================================== - - - - - -.. _ref_flyteidl.core.WorkflowClosure: - -WorkflowClosure ------------------------------------------------------------------- - -Defines an enclosed package of workflow and tasks it references. - - - -.. csv-table:: WorkflowClosure type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "workflow", ":ref:`ref_flyteidl.core.WorkflowTemplate`", "", "required. Workflow template." - "tasks", ":ref:`ref_flyteidl.core.TaskTemplate`", "repeated", "optional. A collection of tasks referenced by the workflow. Only needed if the workflow references tasks." - - - - - - - - - - - - - - - - -.. _ref_google/protobuf/timestamp.proto: - -google/protobuf/timestamp.proto -================================================================== - - - - - -.. 
_ref_google.protobuf.Timestamp: - -Timestamp ------------------------------------------------------------------- - -A Timestamp represents a point in time independent of any time zone or local -calendar, encoded as a count of seconds and fractions of seconds at -nanosecond resolution. The count is relative to an epoch at UTC midnight on -January 1, 1970, in the proleptic Gregorian calendar which extends the -Gregorian calendar backwards to year one. - -All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap -second table is needed for interpretation, using a [24-hour linear -smear](https://developers.google.com/time/smear). - -The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By -restricting to that range, we ensure that we can convert to and from [RFC -3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. - -# Examples - -Example 1: Compute Timestamp from POSIX `time()`. - - Timestamp timestamp; - timestamp.set_seconds(time(NULL)); - timestamp.set_nanos(0); - -Example 2: Compute Timestamp from POSIX `gettimeofday()`. - - struct timeval tv; - gettimeofday(&tv, NULL); - - Timestamp timestamp; - timestamp.set_seconds(tv.tv_sec); - timestamp.set_nanos(tv.tv_usec * 1000); - -Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. - - FILETIME ft; - GetSystemTimeAsFileTime(&ft); - UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; - - // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z - // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. - Timestamp timestamp; - timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); - timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); - -Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. 
- - long millis = System.currentTimeMillis(); - - Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) - .setNanos((int) ((millis % 1000) * 1000000)).build(); - - -Example 5: Compute Timestamp from Java `Instant.now()`. - - Instant now = Instant.now(); - - Timestamp timestamp = - Timestamp.newBuilder().setSeconds(now.getEpochSecond()) - .setNanos(now.getNano()).build(); - - -Example 6: Compute Timestamp from current time in Python. - - timestamp = Timestamp() - timestamp.GetCurrentTime() - -# JSON Mapping - -In JSON format, the Timestamp type is encoded as a string in the -[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the -format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" -where {year} is always expressed using four digits while {month}, {day}, -{hour}, {min}, and {sec} are zero-padded to two digits each. The fractional -seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), -are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone -is required. A proto3 JSON serializer should always use UTC (as indicated by -"Z") when printing the Timestamp type and a proto3 JSON parser should be -able to accept both UTC and other timezones (as indicated by an offset). - -For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past -01:30 UTC on January 15, 2017. - -In JavaScript, one can convert a Date object to this format using the -standard -[toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) -method. In Python, a standard `datetime.datetime` object can be converted -to this format using -[`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with -the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. 
Likewise, in Java, one can use -the Joda Time's [`ISODateTimeFormat.dateTime()`]( -http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D -) to obtain a formatter capable of generating timestamps in this format. - - - -.. csv-table:: Timestamp type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive." - "nanos", ":ref:`ref_int32`", "", "Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive." - - - - - - - - - - - - - - - - -.. _ref_google/protobuf/duration.proto: - -google/protobuf/duration.proto -================================================================== - - - - - -.. _ref_google.protobuf.Duration: - -Duration ------------------------------------------------------------------- - -A Duration represents a signed, fixed-length span of time represented -as a count of seconds and fractions of seconds at nanosecond -resolution. It is independent of any calendar and concepts like "day" -or "month". It is related to Timestamp in that the difference between -two Timestamp values is a Duration and it can be added or subtracted -from a Timestamp. Range is approximately +-10,000 years. - -# Examples - -Example 1: Compute Duration from two Timestamps in pseudo code. 
- - Timestamp start = ...; - Timestamp end = ...; - Duration duration = ...; - - duration.seconds = end.seconds - start.seconds; - duration.nanos = end.nanos - start.nanos; - - if (duration.seconds < 0 && duration.nanos > 0) { - duration.seconds += 1; - duration.nanos -= 1000000000; - } else if (duration.seconds > 0 && duration.nanos < 0) { - duration.seconds -= 1; - duration.nanos += 1000000000; - } - -Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. - - Timestamp start = ...; - Duration duration = ...; - Timestamp end = ...; - - end.seconds = start.seconds + duration.seconds; - end.nanos = start.nanos + duration.nanos; - - if (end.nanos < 0) { - end.seconds -= 1; - end.nanos += 1000000000; - } else if (end.nanos >= 1000000000) { - end.seconds += 1; - end.nanos -= 1000000000; - } - -Example 3: Compute Duration from datetime.timedelta in Python. - - td = datetime.timedelta(days=3, minutes=10) - duration = Duration() - duration.FromTimedelta(td) - -# JSON Mapping - -In JSON format, the Duration type is encoded as a string rather than an -object, where the string ends in the suffix "s" (indicating seconds) and -is preceded by the number of seconds, with nanoseconds expressed as -fractional seconds. For example, 3 seconds with 0 nanoseconds should be -encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -microsecond should be expressed in JSON format as "3.000001s". - - - -.. csv-table:: Duration type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years" - "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. 
Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive." - - - - - - - - - - - - - - - - -.. _ref_google/protobuf/struct.proto: - -google/protobuf/struct.proto -================================================================== - - - - - -.. _ref_google.protobuf.ListValue: - -ListValue ------------------------------------------------------------------- - -`ListValue` is a wrapper around a repeated field of values. - -The JSON representation for `ListValue` is JSON array. - - - -.. csv-table:: ListValue type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_google.protobuf.Value`", "repeated", "Repeated field of dynamically typed values." - - - - - - - -.. _ref_google.protobuf.Struct: - -Struct ------------------------------------------------------------------- - -`Struct` represents a structured data value, consisting of fields -which map to dynamically typed values. In some languages, `Struct` -might be supported by a native representation. For example, in -scripting languages like JS a struct is represented as an -object. The details of that representation are described together -with the proto support for the language. - -The JSON representation for `Struct` is JSON object. - - - -.. csv-table:: Struct type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "fields", ":ref:`ref_google.protobuf.Struct.FieldsEntry`", "repeated", "Unordered map of dynamically typed values." - - - - - - - -.. _ref_google.protobuf.Struct.FieldsEntry: - -Struct.FieldsEntry ------------------------------------------------------------------- - - - - - -.. 
csv-table:: Struct.FieldsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_google.protobuf.Value`", "", "" - - - - - - - -.. _ref_google.protobuf.Value: - -Value ------------------------------------------------------------------- - -`Value` represents a dynamically typed value which can be either -null, a number, a string, a boolean, a recursive struct value, or a -list of values. A producer of value is expected to set one of these -variants. Absence of any variant indicates an error. - -The JSON representation for `Value` is JSON value. - - - -.. csv-table:: Value type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "null_value", ":ref:`ref_google.protobuf.NullValue`", "", "Represents a null value." - "number_value", ":ref:`ref_double`", "", "Represents a double value." - "string_value", ":ref:`ref_string`", "", "Represents a string value." - "bool_value", ":ref:`ref_bool`", "", "Represents a boolean value." - "struct_value", ":ref:`ref_google.protobuf.Struct`", "", "Represents a structured value." - "list_value", ":ref:`ref_google.protobuf.ListValue`", "", "Represents a repeated `Value`." - - - - - - - - - -.. _ref_google.protobuf.NullValue: - -NullValue ------------------------------------------------------------------- - -`NullValue` is a singleton enumeration to represent the null value for the -`Value` type union. - - The JSON representation for `NullValue` is JSON `null`. - -.. csv-table:: Enum NullValue values - :header: "Name", "Number", "Description" - :widths: auto - - "NULL_VALUE", "0", "Null value." - - - - - - - - - -.. _ref_scala_types: - -Scalar Value Types -================== - - - -.. _ref_double: - -double ------------------------------ - - - -.. 
csv-table:: double language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "double", "double", "double", "float", "float64", "double", "float", "Float" - - - -.. _ref_float: - -float ------------------------------ - - - -.. csv-table:: float language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "float", "float", "float", "float", "float32", "float", "float", "Float" - - - -.. _ref_int32: - -int32 ------------------------------ - -Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint32 instead. - -.. csv-table:: int32 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "int32", "int32", "int", "int", "int32", "int", "integer", "Bignum or Fixnum (as required)" - - - -.. _ref_int64: - -int64 ------------------------------ - -Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint64 instead. - -.. csv-table:: int64 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "int64", "int64", "long", "int/long", "int64", "long", "integer/string", "Bignum" - - - -.. _ref_uint32: - -uint32 ------------------------------ - -Uses variable-length encoding. - -.. csv-table:: uint32 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "uint32", "uint32", "int", "int/long", "uint32", "uint", "integer", "Bignum or Fixnum (as required)" - - - -.. _ref_uint64: - -uint64 ------------------------------ - -Uses variable-length encoding. - -.. 
csv-table:: uint64 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "uint64", "uint64", "long", "int/long", "uint64", "ulong", "integer/string", "Bignum or Fixnum (as required)" - - - -.. _ref_sint32: - -sint32 ------------------------------ - -Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int32s. - -.. csv-table:: sint32 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "sint32", "int32", "int", "int", "int32", "int", "integer", "Bignum or Fixnum (as required)" - - - -.. _ref_sint64: - -sint64 ------------------------------ - -Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int64s. - -.. csv-table:: sint64 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "sint64", "int64", "long", "int/long", "int64", "long", "integer/string", "Bignum" - - - -.. _ref_fixed32: - -fixed32 ------------------------------ - -Always four bytes. More efficient than uint32 if values are often greater than 2^28. - -.. csv-table:: fixed32 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "fixed32", "uint32", "int", "int", "uint32", "uint", "integer", "Bignum or Fixnum (as required)" - - - -.. _ref_fixed64: - -fixed64 ------------------------------ - -Always eight bytes. More efficient than uint64 if values are often greater than 2^56. - -.. csv-table:: fixed64 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "fixed64", "uint64", "long", "int/long", "uint64", "ulong", "integer/string", "Bignum" - - - -.. _ref_sfixed32: - -sfixed32 ------------------------------ - -Always four bytes. - -.. 
csv-table:: sfixed32 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "sfixed32", "int32", "int", "int", "int32", "int", "integer", "Bignum or Fixnum (as required)" - - - -.. _ref_sfixed64: - -sfixed64 ------------------------------ - -Always eight bytes. - -.. csv-table:: sfixed64 language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "sfixed64", "int64", "long", "int/long", "int64", "long", "integer/string", "Bignum" - - - -.. _ref_bool: - -bool ------------------------------ - - - -.. csv-table:: bool language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "bool", "bool", "boolean", "boolean", "bool", "bool", "boolean", "TrueClass/FalseClass" - - - -.. _ref_string: - -string ------------------------------ - -A string must always contain UTF-8 encoded or 7-bit ASCII text. - -.. csv-table:: string language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "string", "string", "String", "str/unicode", "string", "string", "string", "String (UTF-8)" - - - -.. _ref_bytes: - -bytes ------------------------------ - -May contain any arbitrary sequence of bytes. - -.. csv-table:: bytes language representation - :header: ".proto Type", "C++", "Java", "Python", "Go", "C#", "PHP", "Ruby" - :widths: auto - - "bytes", "string", "ByteString", "str", "[]byte", "ByteString", "string", "String (ASCII-8BIT)" - - \ No newline at end of file diff --git a/flyteidl/protos/docs/datacatalog/datacatalog.rst b/flyteidl/protos/docs/datacatalog/datacatalog.rst deleted file mode 100644 index 6a2477cf1..000000000 --- a/flyteidl/protos/docs/datacatalog/datacatalog.rst +++ /dev/null @@ -1,1237 +0,0 @@ -###################### -Protocol Documentation -###################### - - - - -.. 
_ref_flyteidl/datacatalog/datacatalog.proto: - -flyteidl/datacatalog/datacatalog.proto -================================================================== - - - - - -.. _ref_datacatalog.AddTagRequest: - -AddTagRequest ------------------------------------------------------------------- - -Request message for tagging an Artifact. - - - -.. csv-table:: AddTagRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tag", ":ref:`ref_datacatalog.Tag`", "", "" - - - - - - - -.. _ref_datacatalog.AddTagResponse: - -AddTagResponse ------------------------------------------------------------------- - -Response message for tagging an Artifact. - - - - - - - - -.. _ref_datacatalog.Artifact: - -Artifact ------------------------------------------------------------------- - -Artifact message. It is composed of several string fields. - - - -.. csv-table:: Artifact type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_string`", "", "The unique ID of the artifact" - "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "The Dataset that the artifact belongs to" - "data", ":ref:`ref_datacatalog.ArtifactData`", "repeated", "A list of data that is associated with the artifact" - "metadata", ":ref:`ref_datacatalog.Metadata`", "", "Free-form metadata associated with the artifact" - "partitions", ":ref:`ref_datacatalog.Partition`", "repeated", "" - "tags", ":ref:`ref_datacatalog.Tag`", "repeated", "" - "created_at", ":ref:`ref_google.protobuf.Timestamp`", "", "creation timestamp of artifact, autogenerated by service" - - - - - - - -.. _ref_datacatalog.ArtifactData: - -ArtifactData ------------------------------------------------------------------- - -ArtifactData that belongs to an artifact - - - -.. csv-table:: ArtifactData type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_flyteidl.core.Literal`", "", "" - - - - - - - -.. 
_ref_datacatalog.ArtifactPropertyFilter: - -ArtifactPropertyFilter ------------------------------------------------------------------- - -Artifact properties we can filter by - - - -.. csv-table:: ArtifactPropertyFilter type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "artifact_id", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.CreateArtifactRequest: - -CreateArtifactRequest ------------------------------------------------------------------- - -Request message for creating an Artifact and its associated artifact Data. - - - -.. csv-table:: CreateArtifactRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "artifact", ":ref:`ref_datacatalog.Artifact`", "", "" - - - - - - - -.. _ref_datacatalog.CreateArtifactResponse: - -CreateArtifactResponse ------------------------------------------------------------------- - -Response message for creating an Artifact. - - - - - - - - -.. _ref_datacatalog.CreateDatasetRequest: - -CreateDatasetRequest ------------------------------------------------------------------- - -Request message for creating a Dataset. - - - -.. csv-table:: CreateDatasetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset", ":ref:`ref_datacatalog.Dataset`", "", "" - - - - - - - -.. _ref_datacatalog.CreateDatasetResponse: - -CreateDatasetResponse ------------------------------------------------------------------- - -Response message for creating a Dataset - - - - - - - - -.. _ref_datacatalog.Dataset: - -Dataset ------------------------------------------------------------------- - -Dataset message. It is uniquely identified by DatasetID. - - - -.. csv-table:: Dataset type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_datacatalog.DatasetID`", "", "" - "metadata", ":ref:`ref_datacatalog.Metadata`", "", "" - "partitionKeys", ":ref:`ref_string`", "repeated", "" - - - - - - - -.. 
_ref_datacatalog.DatasetID: - -DatasetID ------------------------------------------------------------------- - -DatasetID message that is composed of several string fields. - - - -.. csv-table:: DatasetID type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "The name of the project" - "name", ":ref:`ref_string`", "", "The name of the dataset" - "domain", ":ref:`ref_string`", "", "The domain (eg. environment)" - "version", ":ref:`ref_string`", "", "Version of the data schema" - "UUID", ":ref:`ref_string`", "", "UUID for the dataset (if set the above fields are optional)" - - - - - - - -.. _ref_datacatalog.DatasetPropertyFilter: - -DatasetPropertyFilter ------------------------------------------------------------------- - -Dataset properties we can filter by - - - -.. csv-table:: DatasetPropertyFilter type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "project", ":ref:`ref_string`", "", "" - "name", ":ref:`ref_string`", "", "" - "domain", ":ref:`ref_string`", "", "" - "version", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.FilterExpression: - -FilterExpression ------------------------------------------------------------------- - -Filter expression that is composed of a combination of single filters - - - -.. csv-table:: FilterExpression type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "filters", ":ref:`ref_datacatalog.SinglePropertyFilter`", "repeated", "" - - - - - - - -.. _ref_datacatalog.GetArtifactRequest: - -GetArtifactRequest ------------------------------------------------------------------- - -Request message for retrieving an Artifact. Retrieve an artifact based on a query handle that -can be one of artifact_id or tag. The result returned will include the artifact data and metadata -associated with the artifact. - - - -.. 
csv-table:: GetArtifactRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "" - "artifact_id", ":ref:`ref_string`", "", "" - "tag_name", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.GetArtifactResponse: - -GetArtifactResponse ------------------------------------------------------------------- - -Response message for retrieving an Artifact. The result returned will include the artifact data -and metadata associated with the artifact. - - - -.. csv-table:: GetArtifactResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "artifact", ":ref:`ref_datacatalog.Artifact`", "", "" - - - - - - - -.. _ref_datacatalog.GetDatasetRequest: - -GetDatasetRequest ------------------------------------------------------------------- - -Request message for retrieving a Dataset. The Dataset is retrieved by it's unique identifier -which is a combination of several fields. - - - -.. csv-table:: GetDatasetRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "" - - - - - - - -.. _ref_datacatalog.GetDatasetResponse: - -GetDatasetResponse ------------------------------------------------------------------- - -Response message for retrieving a Dataset. The response will include the metadata for the -Dataset. - - - -.. csv-table:: GetDatasetResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset", ":ref:`ref_datacatalog.Dataset`", "", "" - - - - - - - -.. _ref_datacatalog.GetOrExtendReservationRequest: - -GetOrExtendReservationRequest ------------------------------------------------------------------- - -Try to acquire or extend an artifact reservation. If an active reservation exists, retreive that instance. - - - -.. 
csv-table:: GetOrExtendReservationRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "reservation_id", ":ref:`ref_datacatalog.ReservationID`", "", "" - "owner_id", ":ref:`ref_string`", "", "" - "heartbeat_interval", ":ref:`ref_google.protobuf.Duration`", "", "Requested reservation extension heartbeat interval" - - - - - - - -.. _ref_datacatalog.GetOrExtendReservationResponse: - -GetOrExtendReservationResponse ------------------------------------------------------------------- - -Response including either a newly minted reservation or the existing reservation - - - -.. csv-table:: GetOrExtendReservationResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "reservation", ":ref:`ref_datacatalog.Reservation`", "", "" - - - - - - - -.. _ref_datacatalog.KeyValuePair: - -KeyValuePair ------------------------------------------------------------------- - - - - - -.. csv-table:: KeyValuePair type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.ListArtifactsRequest: - -ListArtifactsRequest ------------------------------------------------------------------- - -List the artifacts that belong to the Dataset, optionally filtered using filtered expression. - - - -.. csv-table:: ListArtifactsRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "Use a datasetID for which you want to retrieve the artifacts" - "filter", ":ref:`ref_datacatalog.FilterExpression`", "", "Apply the filter expression to this query" - "pagination", ":ref:`ref_datacatalog.PaginationOptions`", "", "Pagination options to get a page of artifacts" - - - - - - - -.. 
_ref_datacatalog.ListArtifactsResponse: - -ListArtifactsResponse ------------------------------------------------------------------- - -Response to list artifacts - - - -.. csv-table:: ListArtifactsResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "artifacts", ":ref:`ref_datacatalog.Artifact`", "repeated", "The list of artifacts" - "next_token", ":ref:`ref_string`", "", "Token to use to request the next page, pass this into the next requests PaginationOptions" - - - - - - - -.. _ref_datacatalog.ListDatasetsRequest: - -ListDatasetsRequest ------------------------------------------------------------------- - -List the datasets for the given query - - - -.. csv-table:: ListDatasetsRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "filter", ":ref:`ref_datacatalog.FilterExpression`", "", "Apply the filter expression to this query" - "pagination", ":ref:`ref_datacatalog.PaginationOptions`", "", "Pagination options to get a page of datasets" - - - - - - - -.. _ref_datacatalog.ListDatasetsResponse: - -ListDatasetsResponse ------------------------------------------------------------------- - -List the datasets response with token for next pagination - - - -.. csv-table:: ListDatasetsResponse type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "datasets", ":ref:`ref_datacatalog.Dataset`", "repeated", "The list of datasets" - "next_token", ":ref:`ref_string`", "", "Token to use to request the next page, pass this into the next requests PaginationOptions" - - - - - - - -.. _ref_datacatalog.Metadata: - -Metadata ------------------------------------------------------------------- - -Metadata representation for artifacts and datasets - - - -.. 
csv-table:: Metadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key_map", ":ref:`ref_datacatalog.Metadata.KeyMapEntry`", "repeated", "key map is a dictionary of key/val strings that represent metadata" - - - - - - - -.. _ref_datacatalog.Metadata.KeyMapEntry: - -Metadata.KeyMapEntry ------------------------------------------------------------------- - - - - - -.. csv-table:: Metadata.KeyMapEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.PaginationOptions: - -PaginationOptions ------------------------------------------------------------------- - -Pagination options for making list requests - - - -.. csv-table:: PaginationOptions type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "limit", ":ref:`ref_uint32`", "", "the max number of results to return" - "token", ":ref:`ref_string`", "", "the token to pass to fetch the next page" - "sortKey", ":ref:`ref_datacatalog.PaginationOptions.SortKey`", "", "the property that we want to sort the results by" - "sortOrder", ":ref:`ref_datacatalog.PaginationOptions.SortOrder`", "", "the sort order of the results" - - - - - - - -.. _ref_datacatalog.Partition: - -Partition ------------------------------------------------------------------- - -An artifact could have multiple partitions and each partition can have an arbitrary string key/value pair - - - -.. csv-table:: Partition type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.PartitionPropertyFilter: - -PartitionPropertyFilter ------------------------------------------------------------------- - -Partition properties we can filter by - - - -.. 
csv-table:: PartitionPropertyFilter type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key_val", ":ref:`ref_datacatalog.KeyValuePair`", "", "" - - - - - - - -.. _ref_datacatalog.ReleaseReservationRequest: - -ReleaseReservationRequest ------------------------------------------------------------------- - -Request to release reservation - - - -.. csv-table:: ReleaseReservationRequest type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "reservation_id", ":ref:`ref_datacatalog.ReservationID`", "", "" - "owner_id", ":ref:`ref_string`", "", "" - - - - - - - -.. _ref_datacatalog.ReleaseReservationResponse: - -ReleaseReservationResponse ------------------------------------------------------------------- - -Response to release reservation - - - - - - - - -.. _ref_datacatalog.Reservation: - -Reservation ------------------------------------------------------------------- - -A reservation including owner, heartbeat interval, expiration timestamp, and various metadata. - - - -.. csv-table:: Reservation type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "reservation_id", ":ref:`ref_datacatalog.ReservationID`", "", "" - "owner_id", ":ref:`ref_string`", "", "" - "heartbeat_interval", ":ref:`ref_google.protobuf.Duration`", "", "Recommended heartbeat interval to extend reservation" - "expires_at", ":ref:`ref_google.protobuf.Timestamp`", "", "Expiration timestamp of this reservation" - "metadata", ":ref:`ref_datacatalog.Metadata`", "", "" - - - - - - - -.. _ref_datacatalog.ReservationID: - -ReservationID ------------------------------------------------------------------- - -ReservationID message that is composed of several string fields. - - - -.. csv-table:: ReservationID type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "dataset_id", ":ref:`ref_datacatalog.DatasetID`", "", "" - "tag_name", ":ref:`ref_string`", "", "" - - - - - - - -.. 
_ref_datacatalog.SinglePropertyFilter: - -SinglePropertyFilter ------------------------------------------------------------------- - -A single property to filter on. - - - -.. csv-table:: SinglePropertyFilter type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tag_filter", ":ref:`ref_datacatalog.TagPropertyFilter`", "", "" - "partition_filter", ":ref:`ref_datacatalog.PartitionPropertyFilter`", "", "" - "artifact_filter", ":ref:`ref_datacatalog.ArtifactPropertyFilter`", "", "" - "dataset_filter", ":ref:`ref_datacatalog.DatasetPropertyFilter`", "", "" - "operator", ":ref:`ref_datacatalog.SinglePropertyFilter.ComparisonOperator`", "", "field 10 in case we add more entities to query" - - - - - - - -.. _ref_datacatalog.Tag: - -Tag ------------------------------------------------------------------- - -Tag message that is unique to a Dataset. It is associated to a single artifact and -can be retrieved by name later. - - - -.. csv-table:: Tag type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "name", ":ref:`ref_string`", "", "Name of tag" - "artifact_id", ":ref:`ref_string`", "", "The tagged artifact" - "dataset", ":ref:`ref_datacatalog.DatasetID`", "", "The Dataset that this tag belongs to" - - - - - - - -.. _ref_datacatalog.TagPropertyFilter: - -TagPropertyFilter ------------------------------------------------------------------- - -Tag properties we can filter by - - - -.. csv-table:: TagPropertyFilter type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "tag_name", ":ref:`ref_string`", "", "" - - - - - - - - - -.. _ref_datacatalog.PaginationOptions.SortKey: - -PaginationOptions.SortKey ------------------------------------------------------------------- - - - -.. csv-table:: Enum PaginationOptions.SortKey values - :header: "Name", "Number", "Description" - :widths: auto - - "CREATION_TIME", "0", "" - - - -.. 
_ref_datacatalog.PaginationOptions.SortOrder: - -PaginationOptions.SortOrder ------------------------------------------------------------------- - - - -.. csv-table:: Enum PaginationOptions.SortOrder values - :header: "Name", "Number", "Description" - :widths: auto - - "DESCENDING", "0", "" - "ASCENDING", "1", "" - - - -.. _ref_datacatalog.SinglePropertyFilter.ComparisonOperator: - -SinglePropertyFilter.ComparisonOperator ------------------------------------------------------------------- - -as use-cases come up we can add more operators, ex: gte, like, not eq etc. - -.. csv-table:: Enum SinglePropertyFilter.ComparisonOperator values - :header: "Name", "Number", "Description" - :widths: auto - - "EQUALS", "0", "" - - - - - - - -.. _ref_datacatalog.DataCatalog: - -DataCatalog ------------------------------------------------------------------- - -Data Catalog service definition -Data Catalog is a service for indexing parameterized, strongly-typed data artifacts across revisions. -Artifacts are associated with a Dataset, and can be tagged for retrieval. - -.. csv-table:: DataCatalog service methods - :header: "Method Name", "Request Type", "Response Type", "Description" - :widths: auto - - "CreateDataset", ":ref:`ref_datacatalog.CreateDatasetRequest`", ":ref:`ref_datacatalog.CreateDatasetResponse`", "Create a new Dataset. Datasets are unique based on the DatasetID. Datasets are logical groupings of artifacts. Each dataset can have one or more artifacts" - "GetDataset", ":ref:`ref_datacatalog.GetDatasetRequest`", ":ref:`ref_datacatalog.GetDatasetResponse`", "Get a Dataset by the DatasetID. This returns the Dataset with the associated metadata." - "CreateArtifact", ":ref:`ref_datacatalog.CreateArtifactRequest`", ":ref:`ref_datacatalog.CreateArtifactResponse`", "Create an artifact and the artifact data associated with it. 
An artifact can be a hive partition or arbitrary files or data values" - "GetArtifact", ":ref:`ref_datacatalog.GetArtifactRequest`", ":ref:`ref_datacatalog.GetArtifactResponse`", "Retrieve an artifact by an identifying handle. This returns an artifact along with the artifact data." - "AddTag", ":ref:`ref_datacatalog.AddTagRequest`", ":ref:`ref_datacatalog.AddTagResponse`", "Associate a tag with an artifact. Tags are unique within a Dataset." - "ListArtifacts", ":ref:`ref_datacatalog.ListArtifactsRequest`", ":ref:`ref_datacatalog.ListArtifactsResponse`", "Return a paginated list of artifacts" - "ListDatasets", ":ref:`ref_datacatalog.ListDatasetsRequest`", ":ref:`ref_datacatalog.ListDatasetsResponse`", "Return a paginated list of datasets" - "GetOrExtendReservation", ":ref:`ref_datacatalog.GetOrExtendReservationRequest`", ":ref:`ref_datacatalog.GetOrExtendReservationResponse`", "Attempts to get or extend a reservation for the corresponding artifact. If one already exists (ie. another entity owns the reservation) then that reservation is retrieved. Once you acquire a reservation, you need to periodically extend the reservation with an identical call. If the reservation is not extended before the defined expiration, it may be acquired by another task. Note: We may have multiple concurrent tasks with the same signature and the same input that try to populate the same artifact at the same time. Thus with reservation, only one task can run at a time, until the reservation expires. Note: If task A does not extend the reservation in time and the reservation expires, another task B may take over the reservation, resulting in two tasks A and B running in parallel. So a third task C may get the Artifact from A or B, whichever writes last." - "ReleaseReservation", ":ref:`ref_datacatalog.ReleaseReservationRequest`", ":ref:`ref_datacatalog.ReleaseReservationResponse`", "Release the reservation when the task holding the spot fails so that the other tasks can grab the spot." 
- - - - - -.. _ref_google/protobuf/timestamp.proto: - -google/protobuf/timestamp.proto -================================================================== - - - - - -.. _ref_google.protobuf.Timestamp: - -Timestamp ------------------------------------------------------------------- - -A Timestamp represents a point in time independent of any time zone or local -calendar, encoded as a count of seconds and fractions of seconds at -nanosecond resolution. The count is relative to an epoch at UTC midnight on -January 1, 1970, in the proleptic Gregorian calendar which extends the -Gregorian calendar backwards to year one. - -All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap -second table is needed for interpretation, using a [24-hour linear -smear](https://developers.google.com/time/smear). - -The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By -restricting to that range, we ensure that we can convert to and from [RFC -3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. - -# Examples - -Example 1: Compute Timestamp from POSIX `time()`. - - Timestamp timestamp; - timestamp.set_seconds(time(NULL)); - timestamp.set_nanos(0); - -Example 2: Compute Timestamp from POSIX `gettimeofday()`. - - struct timeval tv; - gettimeofday(&tv, NULL); - - Timestamp timestamp; - timestamp.set_seconds(tv.tv_sec); - timestamp.set_nanos(tv.tv_usec * 1000); - -Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. - - FILETIME ft; - GetSystemTimeAsFileTime(&ft); - UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; - - // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z - // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. - Timestamp timestamp; - timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); - timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); - -Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. 
- - long millis = System.currentTimeMillis(); - - Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) - .setNanos((int) ((millis % 1000) * 1000000)).build(); - - -Example 5: Compute Timestamp from Java `Instant.now()`. - - Instant now = Instant.now(); - - Timestamp timestamp = - Timestamp.newBuilder().setSeconds(now.getEpochSecond()) - .setNanos(now.getNano()).build(); - - -Example 6: Compute Timestamp from current time in Python. - - timestamp = Timestamp() - timestamp.GetCurrentTime() - -# JSON Mapping - -In JSON format, the Timestamp type is encoded as a string in the -[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the -format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" -where {year} is always expressed using four digits while {month}, {day}, -{hour}, {min}, and {sec} are zero-padded to two digits each. The fractional -seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), -are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone -is required. A proto3 JSON serializer should always use UTC (as indicated by -"Z") when printing the Timestamp type and a proto3 JSON parser should be -able to accept both UTC and other timezones (as indicated by an offset). - -For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past -01:30 UTC on January 15, 2017. - -In JavaScript, one can convert a Date object to this format using the -standard -[toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) -method. In Python, a standard `datetime.datetime` object can be converted -to this format using -[`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with -the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. 
Likewise, in Java, one can use -the Joda Time's [`ISODateTimeFormat.dateTime()`]( -http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D -) to obtain a formatter capable of generating timestamps in this format. - - - -.. csv-table:: Timestamp type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive." - "nanos", ":ref:`ref_int32`", "", "Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive." - - - - - - - - - - - - - - - - -.. _ref_google/protobuf/duration.proto: - -google/protobuf/duration.proto -================================================================== - - - - - -.. _ref_google.protobuf.Duration: - -Duration ------------------------------------------------------------------- - -A Duration represents a signed, fixed-length span of time represented -as a count of seconds and fractions of seconds at nanosecond -resolution. It is independent of any calendar and concepts like "day" -or "month". It is related to Timestamp in that the difference between -two Timestamp values is a Duration and it can be added or subtracted -from a Timestamp. Range is approximately +-10,000 years. - -# Examples - -Example 1: Compute Duration from two Timestamps in pseudo code. 
- - Timestamp start = ...; - Timestamp end = ...; - Duration duration = ...; - - duration.seconds = end.seconds - start.seconds; - duration.nanos = end.nanos - start.nanos; - - if (duration.seconds < 0 && duration.nanos > 0) { - duration.seconds += 1; - duration.nanos -= 1000000000; - } else if (duration.seconds > 0 && duration.nanos < 0) { - duration.seconds -= 1; - duration.nanos += 1000000000; - } - -Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. - - Timestamp start = ...; - Duration duration = ...; - Timestamp end = ...; - - end.seconds = start.seconds + duration.seconds; - end.nanos = start.nanos + duration.nanos; - - if (end.nanos < 0) { - end.seconds -= 1; - end.nanos += 1000000000; - } else if (end.nanos >= 1000000000) { - end.seconds += 1; - end.nanos -= 1000000000; - } - -Example 3: Compute Duration from datetime.timedelta in Python. - - td = datetime.timedelta(days=3, minutes=10) - duration = Duration() - duration.FromTimedelta(td) - -# JSON Mapping - -In JSON format, the Duration type is encoded as a string rather than an -object, where the string ends in the suffix "s" (indicating seconds) and -is preceded by the number of seconds, with nanoseconds expressed as -fractional seconds. For example, 3 seconds with 0 nanoseconds should be -encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -microsecond should be expressed in JSON format as "3.000001s". - - - -.. csv-table:: Duration type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years" - "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. 
Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive." - - - - - - - - - - - - - - - - -.. _ref_google/protobuf/struct.proto: - -google/protobuf/struct.proto -================================================================== - - - - - -.. _ref_google.protobuf.ListValue: - -ListValue ------------------------------------------------------------------- - -`ListValue` is a wrapper around a repeated field of values. - -The JSON representation for `ListValue` is JSON array. - - - -.. csv-table:: ListValue type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_google.protobuf.Value`", "repeated", "Repeated field of dynamically typed values." - - - - - - - -.. _ref_google.protobuf.Struct: - -Struct ------------------------------------------------------------------- - -`Struct` represents a structured data value, consisting of fields -which map to dynamically typed values. In some languages, `Struct` -might be supported by a native representation. For example, in -scripting languages like JS a struct is represented as an -object. The details of that representation are described together -with the proto support for the language. - -The JSON representation for `Struct` is JSON object. - - - -.. csv-table:: Struct type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "fields", ":ref:`ref_google.protobuf.Struct.FieldsEntry`", "repeated", "Unordered map of dynamically typed values." - - - - - - - -.. _ref_google.protobuf.Struct.FieldsEntry: - -Struct.FieldsEntry ------------------------------------------------------------------- - - - - - -.. 
csv-table:: Struct.FieldsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_google.protobuf.Value`", "", "" - - - - - - - -.. _ref_google.protobuf.Value: - -Value ------------------------------------------------------------------- - -`Value` represents a dynamically typed value which can be either -null, a number, a string, a boolean, a recursive struct value, or a -list of values. A producer of value is expected to set one of these -variants. Absence of any variant indicates an error. - -The JSON representation for `Value` is JSON value. - - - -.. csv-table:: Value type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "null_value", ":ref:`ref_google.protobuf.NullValue`", "", "Represents a null value." - "number_value", ":ref:`ref_double`", "", "Represents a double value." - "string_value", ":ref:`ref_string`", "", "Represents a string value." - "bool_value", ":ref:`ref_bool`", "", "Represents a boolean value." - "struct_value", ":ref:`ref_google.protobuf.Struct`", "", "Represents a structured value." - "list_value", ":ref:`ref_google.protobuf.ListValue`", "", "Represents a repeated `Value`." - - - - - - - - - -.. _ref_google.protobuf.NullValue: - -NullValue ------------------------------------------------------------------- - -`NullValue` is a singleton enumeration to represent the null value for the -`Value` type union. - - The JSON representation for `NullValue` is JSON `null`. - -.. csv-table:: Enum NullValue values - :header: "Name", "Number", "Description" - :widths: auto - - "NULL_VALUE", "0", "Null value." - - - - - - - - diff --git a/flyteidl/protos/docs/event/event.rst b/flyteidl/protos/docs/event/event.rst deleted file mode 100644 index b1bcc69ca..000000000 --- a/flyteidl/protos/docs/event/event.rst +++ /dev/null @@ -1,693 +0,0 @@ -###################### -Protocol Documentation -###################### - - - - -.. 
_ref_flyteidl/event/event.proto: - -flyteidl/event/event.proto -================================================================== - - - - - -.. _ref_flyteidl.event.DynamicWorkflowNodeMetadata: - -DynamicWorkflowNodeMetadata ------------------------------------------------------------------- - -For dynamic workflow nodes we send information about the dynamic workflow definition that gets generated. - - - -.. csv-table:: DynamicWorkflowNodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.Identifier`", "", "id represents the unique identifier of the workflow." - "compiled_workflow", ":ref:`ref_flyteidl.core.CompiledWorkflowClosure`", "", "Represents the compiled representation of the embedded dynamic workflow." - - - - - - - -.. _ref_flyteidl.event.ExternalResourceInfo: - -ExternalResourceInfo ------------------------------------------------------------------- - -This message contains metadata about external resources produced or used by a specific task execution. - - - -.. csv-table:: ExternalResourceInfo type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "external_id", ":ref:`ref_string`", "", "Identifier for an external resource created by this task execution, for example Qubole query ID or presto query ids." - "index", ":ref:`ref_uint32`", "", "A unique index for the external resource with respect to all external resources for this task. Although the identifier may change between task reporting events or retries, this will remain the same to enable aggregating information from multiple reports." 
- "retry_attempt", ":ref:`ref_uint32`", "", "Retry attempt number for this external resource, ie., 2 for the second attempt" - "phase", ":ref:`ref_flyteidl.core.TaskExecution.Phase`", "", "Phase associated with the external resource" - "cache_status", ":ref:`ref_flyteidl.core.CatalogCacheStatus`", "", "Captures the status of caching for this external resource execution." - "logs", ":ref:`ref_flyteidl.core.TaskLog`", "repeated", "log information for the external resource execution" - - - - - - - -.. _ref_flyteidl.event.NodeExecutionEvent: - -NodeExecutionEvent ------------------------------------------------------------------- - - - - - -.. csv-table:: NodeExecutionEvent type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "Unique identifier for this node execution" - "producer_id", ":ref:`ref_string`", "", "the id of the originator (Propeller) of the event" - "phase", ":ref:`ref_flyteidl.core.NodeExecution.Phase`", "", "" - "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the original event occurred, it is generated by the executor of the node." - "input_uri", ":ref:`ref_string`", "", "" - "output_uri", ":ref:`ref_string`", "", "URL to the output of the execution, it encodes all the information including Cloud source provider. ie., s3://..." - "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the execution" - "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Raw output data produced by this node execution." - "workflow_node_metadata", ":ref:`ref_flyteidl.event.WorkflowNodeMetadata`", "", "" - "task_node_metadata", ":ref:`ref_flyteidl.event.TaskNodeMetadata`", "", "" - "parent_task_metadata", ":ref:`ref_flyteidl.event.ParentTaskExecutionMetadata`", "", "[To be deprecated] Specifies which task (if any) launched this node." 
- "parent_node_metadata", ":ref:`ref_flyteidl.event.ParentNodeExecutionMetadata`", "", "Specifies the parent node of the current node execution. Node executions at level zero will not have a parent node." - "retry_group", ":ref:`ref_string`", "", "Retry group to indicate grouping of nodes by retries" - "spec_node_id", ":ref:`ref_string`", "", "Identifier of the node in the original workflow/graph This maps to value of WorkflowTemplate.nodes[X].id" - "node_name", ":ref:`ref_string`", "", "Friendly readable name for the node" - "event_version", ":ref:`ref_int32`", "", "" - "is_parent", ":ref:`ref_bool`", "", "Whether this node launched a subworkflow." - "is_dynamic", ":ref:`ref_bool`", "", "Whether this node yielded a dynamic workflow." - - - - - - - -.. _ref_flyteidl.event.ParentNodeExecutionMetadata: - -ParentNodeExecutionMetadata ------------------------------------------------------------------- - - - - - -.. csv-table:: ParentNodeExecutionMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "node_id", ":ref:`ref_string`", "", "Unique identifier of the parent node id within the execution This is value of core.NodeExecutionIdentifier.node_id of the parent node" - - - - - - - -.. _ref_flyteidl.event.ParentTaskExecutionMetadata: - -ParentTaskExecutionMetadata ------------------------------------------------------------------- - - - - - -.. csv-table:: ParentTaskExecutionMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "id", ":ref:`ref_flyteidl.core.TaskExecutionIdentifier`", "", "" - - - - - - - -.. _ref_flyteidl.event.ResourcePoolInfo: - -ResourcePoolInfo ------------------------------------------------------------------- - -This message holds task execution metadata specific to resource allocation used to manage concurrent -executions for a project namespace. - - - -.. 
csv-table:: ResourcePoolInfo type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "allocation_token", ":ref:`ref_string`", "", "Unique resource ID used to identify this execution when allocating a token." - "namespace", ":ref:`ref_string`", "", "Namespace under which this task execution requested an allocation token." - - - - - - - -.. _ref_flyteidl.event.TaskExecutionEvent: - -TaskExecutionEvent ------------------------------------------------------------------- - -Plugin specific execution event information. For tasks like Python, Hive, Spark, DynamicJob. - - - -.. csv-table:: TaskExecutionEvent type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "task_id", ":ref:`ref_flyteidl.core.Identifier`", "", "ID of the task. In combination with the retryAttempt this will indicate the task execution uniquely for a given parent node execution." - "parent_node_execution_id", ":ref:`ref_flyteidl.core.NodeExecutionIdentifier`", "", "A task execution is always kicked off by a node execution, the event consumer will use the parent_id to relate the task to it's parent node execution" - "retry_attempt", ":ref:`ref_uint32`", "", "retry attempt number for this task, ie., 2 for the second attempt" - "phase", ":ref:`ref_flyteidl.core.TaskExecution.Phase`", "", "Phase associated with the event" - "producer_id", ":ref:`ref_string`", "", "id of the process that sent this event, mainly for trace debugging" - "logs", ":ref:`ref_flyteidl.core.TaskLog`", "repeated", "log information for the task execution" - "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the original event occurred, it is generated by the executor of the task." - "input_uri", ":ref:`ref_string`", "", "URI of the input file, it encodes all the information including Cloud source provider. ie., s3://..." 
- "output_uri", ":ref:`ref_string`", "", "URI to the output of the execution, it will be in a format that encodes all the information including Cloud source provider. ie., s3://..." - "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the execution" - "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Raw output data produced by this task execution." - "custom_info", ":ref:`ref_google.protobuf.Struct`", "", "Custom data that the task plugin sends back. This is extensible to allow various plugins in the system." - "phase_version", ":ref:`ref_uint32`", "", "Some phases, like RUNNING, can send multiple events with changed metadata (new logs, additional custom_info, etc) that should be recorded regardless of the lack of phase change. The version field should be incremented when metadata changes across the duration of an individual phase." - "reason", ":ref:`ref_string`", "", "An optional explanation for the phase transition." - "task_type", ":ref:`ref_string`", "", "A predefined yet extensible Task type identifier. If the task definition is already registered in flyte admin this type will be identical, but not all task executions necessarily use pre-registered definitions and this type is useful to render the task in the UI, filter task executions, etc." - "metadata", ":ref:`ref_flyteidl.event.TaskExecutionMetadata`", "", "Metadata around how a task was executed." - - - - - - - -.. _ref_flyteidl.event.TaskExecutionMetadata: - -TaskExecutionMetadata ------------------------------------------------------------------- - -Holds metadata around how a task was executed. -As a task transitions across event phases during execution some attributes, such its generated name, generated external resources, -and more may grow in size but not change necessarily based on the phase transition that sparked the event update. -Metadata is a container for these attributes across the task execution lifecycle. - - - -.. 
csv-table:: TaskExecutionMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "generated_name", ":ref:`ref_string`", "", "Unique, generated name for this task execution used by the backend." - "external_resources", ":ref:`ref_flyteidl.event.ExternalResourceInfo`", "repeated", "Additional data on external resources on other back-ends or platforms (e.g. Hive, Qubole, etc) launched by this task execution." - "resource_pool_info", ":ref:`ref_flyteidl.event.ResourcePoolInfo`", "repeated", "Includes additional data on concurrent resource management used during execution.. This is a repeated field because a plugin can request multiple resource allocations during execution." - "plugin_identifier", ":ref:`ref_string`", "", "The identifier of the plugin used to execute this task." - "instance_class", ":ref:`ref_flyteidl.event.TaskExecutionMetadata.InstanceClass`", "", "" - - - - - - - -.. _ref_flyteidl.event.TaskNodeMetadata: - -TaskNodeMetadata ------------------------------------------------------------------- - - - - - -.. csv-table:: TaskNodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "cache_status", ":ref:`ref_flyteidl.core.CatalogCacheStatus`", "", "Captures the status of caching for this execution." - "catalog_key", ":ref:`ref_flyteidl.core.CatalogMetadata`", "", "This structure carries the catalog artifact information" - "reservation_status", ":ref:`ref_flyteidl.core.CatalogReservation.Status`", "", "Captures the status of cache reservations for this execution." - "dynamic_workflow", ":ref:`ref_flyteidl.event.DynamicWorkflowNodeMetadata`", "", "In the case this task launched a dynamic workflow we capture its structure here." - - - - - - - -.. _ref_flyteidl.event.WorkflowExecutionEvent: - -WorkflowExecutionEvent ------------------------------------------------------------------- - - - - - -.. 
csv-table:: WorkflowExecutionEvent type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "Workflow execution id" - "producer_id", ":ref:`ref_string`", "", "the id of the originator (Propeller) of the event" - "phase", ":ref:`ref_flyteidl.core.WorkflowExecution.Phase`", "", "" - "occurred_at", ":ref:`ref_google.protobuf.Timestamp`", "", "This timestamp represents when the original event occurred, it is generated by the executor of the workflow." - "output_uri", ":ref:`ref_string`", "", "URL to the output of the execution, it encodes all the information including Cloud source provider. ie., s3://..." - "error", ":ref:`ref_flyteidl.core.ExecutionError`", "", "Error information for the execution" - "output_data", ":ref:`ref_flyteidl.core.LiteralMap`", "", "Raw output data produced by this workflow execution." - - - - - - - -.. _ref_flyteidl.event.WorkflowNodeMetadata: - -WorkflowNodeMetadata ------------------------------------------------------------------- - -For Workflow Nodes we need to send information about the workflow that's launched - - - -.. csv-table:: WorkflowNodeMetadata type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "execution_id", ":ref:`ref_flyteidl.core.WorkflowExecutionIdentifier`", "", "" - - - - - - - - - -.. _ref_flyteidl.event.TaskExecutionMetadata.InstanceClass: - -TaskExecutionMetadata.InstanceClass ------------------------------------------------------------------- - -Includes the broad category of machine used for this specific task execution. - -.. csv-table:: Enum TaskExecutionMetadata.InstanceClass values - :header: "Name", "Number", "Description" - :widths: auto - - "DEFAULT", "0", "The default instance class configured for the flyte application platform." - "INTERRUPTIBLE", "1", "The instance class configured for interruptible tasks." - - - - - - - - - - -.. 
_ref_google/protobuf/timestamp.proto: - -google/protobuf/timestamp.proto -================================================================== - - - - - -.. _ref_google.protobuf.Timestamp: - -Timestamp ------------------------------------------------------------------- - -A Timestamp represents a point in time independent of any time zone or local -calendar, encoded as a count of seconds and fractions of seconds at -nanosecond resolution. The count is relative to an epoch at UTC midnight on -January 1, 1970, in the proleptic Gregorian calendar which extends the -Gregorian calendar backwards to year one. - -All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap -second table is needed for interpretation, using a [24-hour linear -smear](https://developers.google.com/time/smear). - -The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By -restricting to that range, we ensure that we can convert to and from [RFC -3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. - -# Examples - -Example 1: Compute Timestamp from POSIX `time()`. - - Timestamp timestamp; - timestamp.set_seconds(time(NULL)); - timestamp.set_nanos(0); - -Example 2: Compute Timestamp from POSIX `gettimeofday()`. - - struct timeval tv; - gettimeofday(&tv, NULL); - - Timestamp timestamp; - timestamp.set_seconds(tv.tv_sec); - timestamp.set_nanos(tv.tv_usec * 1000); - -Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. - - FILETIME ft; - GetSystemTimeAsFileTime(&ft); - UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; - - // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z - // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. - Timestamp timestamp; - timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); - timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); - -Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. 
- - long millis = System.currentTimeMillis(); - - Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) - .setNanos((int) ((millis % 1000) * 1000000)).build(); - - -Example 5: Compute Timestamp from Java `Instant.now()`. - - Instant now = Instant.now(); - - Timestamp timestamp = - Timestamp.newBuilder().setSeconds(now.getEpochSecond()) - .setNanos(now.getNano()).build(); - - -Example 6: Compute Timestamp from current time in Python. - - timestamp = Timestamp() - timestamp.GetCurrentTime() - -# JSON Mapping - -In JSON format, the Timestamp type is encoded as a string in the -[RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the -format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" -where {year} is always expressed using four digits while {month}, {day}, -{hour}, {min}, and {sec} are zero-padded to two digits each. The fractional -seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), -are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone -is required. A proto3 JSON serializer should always use UTC (as indicated by -"Z") when printing the Timestamp type and a proto3 JSON parser should be -able to accept both UTC and other timezones (as indicated by an offset). - -For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past -01:30 UTC on January 15, 2017. - -In JavaScript, one can convert a Date object to this format using the -standard -[toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) -method. In Python, a standard `datetime.datetime` object can be converted -to this format using -[`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with -the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. 
Likewise, in Java, one can use -the Joda Time's [`ISODateTimeFormat.dateTime()`]( -http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D -) to obtain a formatter capable of generating timestamps in this format. - - - -.. csv-table:: Timestamp type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive." - "nanos", ":ref:`ref_int32`", "", "Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive." - - - - - - - - - - - - - - - - -.. _ref_google/protobuf/duration.proto: - -google/protobuf/duration.proto -================================================================== - - - - - -.. _ref_google.protobuf.Duration: - -Duration ------------------------------------------------------------------- - -A Duration represents a signed, fixed-length span of time represented -as a count of seconds and fractions of seconds at nanosecond -resolution. It is independent of any calendar and concepts like "day" -or "month". It is related to Timestamp in that the difference between -two Timestamp values is a Duration and it can be added or subtracted -from a Timestamp. Range is approximately +-10,000 years. - -# Examples - -Example 1: Compute Duration from two Timestamps in pseudo code. 
- - Timestamp start = ...; - Timestamp end = ...; - Duration duration = ...; - - duration.seconds = end.seconds - start.seconds; - duration.nanos = end.nanos - start.nanos; - - if (duration.seconds < 0 && duration.nanos > 0) { - duration.seconds += 1; - duration.nanos -= 1000000000; - } else if (duration.seconds > 0 && duration.nanos < 0) { - duration.seconds -= 1; - duration.nanos += 1000000000; - } - -Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. - - Timestamp start = ...; - Duration duration = ...; - Timestamp end = ...; - - end.seconds = start.seconds + duration.seconds; - end.nanos = start.nanos + duration.nanos; - - if (end.nanos < 0) { - end.seconds -= 1; - end.nanos += 1000000000; - } else if (end.nanos >= 1000000000) { - end.seconds += 1; - end.nanos -= 1000000000; - } - -Example 3: Compute Duration from datetime.timedelta in Python. - - td = datetime.timedelta(days=3, minutes=10) - duration = Duration() - duration.FromTimedelta(td) - -# JSON Mapping - -In JSON format, the Duration type is encoded as a string rather than an -object, where the string ends in the suffix "s" (indicating seconds) and -is preceded by the number of seconds, with nanoseconds expressed as -fractional seconds. For example, 3 seconds with 0 nanoseconds should be -encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -microsecond should be expressed in JSON format as "3.000001s". - - - -.. csv-table:: Duration type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "seconds", ":ref:`ref_int64`", "", "Signed seconds of the span of time. Must be from -315,576,000,000 to +315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years" - "nanos", ":ref:`ref_int32`", "", "Signed fractions of a second at nanosecond resolution of the span of time. 
Durations less than one second are represented with a 0 `seconds` field and a positive or negative `nanos` field. For durations of one second or more, a non-zero value for the `nanos` field must be of the same sign as the `seconds` field. Must be from -999,999,999 to +999,999,999 inclusive." - - - - - - - - - - - - - - - - -.. _ref_google/protobuf/struct.proto: - -google/protobuf/struct.proto -================================================================== - - - - - -.. _ref_google.protobuf.ListValue: - -ListValue ------------------------------------------------------------------- - -`ListValue` is a wrapper around a repeated field of values. - -The JSON representation for `ListValue` is JSON array. - - - -.. csv-table:: ListValue type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "values", ":ref:`ref_google.protobuf.Value`", "repeated", "Repeated field of dynamically typed values." - - - - - - - -.. _ref_google.protobuf.Struct: - -Struct ------------------------------------------------------------------- - -`Struct` represents a structured data value, consisting of fields -which map to dynamically typed values. In some languages, `Struct` -might be supported by a native representation. For example, in -scripting languages like JS a struct is represented as an -object. The details of that representation are described together -with the proto support for the language. - -The JSON representation for `Struct` is JSON object. - - - -.. csv-table:: Struct type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "fields", ":ref:`ref_google.protobuf.Struct.FieldsEntry`", "repeated", "Unordered map of dynamically typed values." - - - - - - - -.. _ref_google.protobuf.Struct.FieldsEntry: - -Struct.FieldsEntry ------------------------------------------------------------------- - - - - - -.. 
csv-table:: Struct.FieldsEntry type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "key", ":ref:`ref_string`", "", "" - "value", ":ref:`ref_google.protobuf.Value`", "", "" - - - - - - - -.. _ref_google.protobuf.Value: - -Value ------------------------------------------------------------------- - -`Value` represents a dynamically typed value which can be either -null, a number, a string, a boolean, a recursive struct value, or a -list of values. A producer of value is expected to set one of these -variants. Absence of any variant indicates an error. - -The JSON representation for `Value` is JSON value. - - - -.. csv-table:: Value type fields - :header: "Field", "Type", "Label", "Description" - :widths: auto - - "null_value", ":ref:`ref_google.protobuf.NullValue`", "", "Represents a null value." - "number_value", ":ref:`ref_double`", "", "Represents a double value." - "string_value", ":ref:`ref_string`", "", "Represents a string value." - "bool_value", ":ref:`ref_bool`", "", "Represents a boolean value." - "struct_value", ":ref:`ref_google.protobuf.Struct`", "", "Represents a structured value." - "list_value", ":ref:`ref_google.protobuf.ListValue`", "", "Represents a repeated `Value`." - - - - - - - - - -.. _ref_google.protobuf.NullValue: - -NullValue ------------------------------------------------------------------- - -`NullValue` is a singleton enumeration to represent the null value for the -`Value` type union. - - The JSON representation for `NullValue` is JSON `null`. - -.. csv-table:: Enum NullValue values - :header: "Name", "Number", "Description" - :widths: auto - - "NULL_VALUE", "0", "Null value." 
- - - - - - - - diff --git a/flyteidl/protos/flyteidl/service/dataproxy.proto b/flyteidl/protos/flyteidl/service/dataproxy.proto index 8fe78a6f2..d3021f2c9 100644 --- a/flyteidl/protos/flyteidl/service/dataproxy.proto +++ b/flyteidl/protos/flyteidl/service/dataproxy.proto @@ -39,9 +39,9 @@ message CreateUploadLocationRequest { google.protobuf.Duration expires_in = 4; } -// DataProxy defines an RPC Service that allows access to user-data in a controlled manner. -service DataProxy { - // Retrieves user information about the currently logged in user. +// DataProxyService defines an RPC Service that allows access to user-data in a controlled manner. +service DataProxyService { + // CreateUploadLocation creates a signed url to upload artifacts to for a given project/domain. rpc CreateUploadLocation (CreateUploadLocationRequest) returns (CreateUploadLocationResponse) { option (google.api.http) = { post: "/api/v1/dataproxy/artifact_urn"