From 5a4b8b392879be731dba40fd3e539276081e6953 Mon Sep 17 00:00:00 2001 From: Bernhard Stadlbauer Date: Sat, 17 Dec 2022 18:15:28 +0100 Subject: [PATCH] Update `dask.proto` after PR review --- gen/pb-cpp/flyteidl/plugins/dask.pb.cc | 556 ++++----- gen/pb-cpp/flyteidl/plugins/dask.pb.h | 416 +++---- gen/pb-go/flyteidl/plugins/dask.pb.go | 156 +-- .../flyteidl/plugins/dask.pb.validate.go | 72 +- gen/pb-java/flyteidl/plugins/Dask.java | 1007 ++++++++--------- gen/pb_python/flyteidl/plugins/dask_pb2.py | 14 +- gen/pb_python/flyteidl/plugins/dask_pb2.pyi | 32 +- protos/flyteidl/plugins/dask.proto | 34 +- 8 files changed, 1140 insertions(+), 1147 deletions(-) diff --git a/gen/pb-cpp/flyteidl/plugins/dask.pb.cc b/gen/pb-cpp/flyteidl/plugins/dask.pb.cc index 3bae1972b..f7fc8b532 100644 --- a/gen/pb-cpp/flyteidl/plugins/dask.pb.cc +++ b/gen/pb-cpp/flyteidl/plugins/dask.pb.cc @@ -17,22 +17,22 @@ #include extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2ftasks_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_Resources_flyteidl_2fcore_2ftasks_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fplugins_2fdask_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_DaskCluster_flyteidl_2fplugins_2fdask_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fplugins_2fdask_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_JobPodSpec_flyteidl_2fplugins_2fdask_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fplugins_2fdask_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_Scheduler_flyteidl_2fplugins_2fdask_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fplugins_2fdask_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_WorkerGroup_flyteidl_2fplugins_2fdask_2eproto; namespace flyteidl { namespace plugins { class DaskJobDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed _instance; } _DaskJob_default_instance_; -class JobPodSpecDefaultTypeInternal { +class SchedulerDefaultTypeInternal { public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _JobPodSpec_default_instance_; -class DaskClusterDefaultTypeInternal { + ::google::protobuf::internal::ExplicitlyConstructed _instance; +} _Scheduler_default_instance_; +class WorkerGroupDefaultTypeInternal { public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _DaskCluster_default_instance_; + ::google::protobuf::internal::ExplicitlyConstructed _instance; +} _WorkerGroup_default_instance_; } // namespace plugins } // namespace flyteidl static void InitDefaultsDaskJob_flyteidl_2fplugins_2fdask_2eproto() { @@ -48,43 +48,43 @@ static void InitDefaultsDaskJob_flyteidl_2fplugins_2fdask_2eproto() { ::google::protobuf::internal::SCCInfo<2> scc_info_DaskJob_flyteidl_2fplugins_2fdask_2eproto = {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 2, InitDefaultsDaskJob_flyteidl_2fplugins_2fdask_2eproto}, { - &scc_info_JobPodSpec_flyteidl_2fplugins_2fdask_2eproto.base, - &scc_info_DaskCluster_flyteidl_2fplugins_2fdask_2eproto.base,}}; + &scc_info_Scheduler_flyteidl_2fplugins_2fdask_2eproto.base, + &scc_info_WorkerGroup_flyteidl_2fplugins_2fdask_2eproto.base,}}; -static void InitDefaultsJobPodSpec_flyteidl_2fplugins_2fdask_2eproto() { +static void InitDefaultsScheduler_flyteidl_2fplugins_2fdask_2eproto() { GOOGLE_PROTOBUF_VERIFY_VERSION; { - void* ptr = &::flyteidl::plugins::_JobPodSpec_default_instance_; - new (ptr) ::flyteidl::plugins::JobPodSpec(); + void* ptr = &::flyteidl::plugins::_Scheduler_default_instance_; + new (ptr) 
::flyteidl::plugins::Scheduler(); ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); } - ::flyteidl::plugins::JobPodSpec::InitAsDefaultInstance(); + ::flyteidl::plugins::Scheduler::InitAsDefaultInstance(); } -::google::protobuf::internal::SCCInfo<1> scc_info_JobPodSpec_flyteidl_2fplugins_2fdask_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsJobPodSpec_flyteidl_2fplugins_2fdask_2eproto}, { +::google::protobuf::internal::SCCInfo<1> scc_info_Scheduler_flyteidl_2fplugins_2fdask_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsScheduler_flyteidl_2fplugins_2fdask_2eproto}, { &scc_info_Resources_flyteidl_2fcore_2ftasks_2eproto.base,}}; -static void InitDefaultsDaskCluster_flyteidl_2fplugins_2fdask_2eproto() { +static void InitDefaultsWorkerGroup_flyteidl_2fplugins_2fdask_2eproto() { GOOGLE_PROTOBUF_VERIFY_VERSION; { - void* ptr = &::flyteidl::plugins::_DaskCluster_default_instance_; - new (ptr) ::flyteidl::plugins::DaskCluster(); + void* ptr = &::flyteidl::plugins::_WorkerGroup_default_instance_; + new (ptr) ::flyteidl::plugins::WorkerGroup(); ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); } - ::flyteidl::plugins::DaskCluster::InitAsDefaultInstance(); + ::flyteidl::plugins::WorkerGroup::InitAsDefaultInstance(); } -::google::protobuf::internal::SCCInfo<1> scc_info_DaskCluster_flyteidl_2fplugins_2fdask_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsDaskCluster_flyteidl_2fplugins_2fdask_2eproto}, { +::google::protobuf::internal::SCCInfo<1> scc_info_WorkerGroup_flyteidl_2fplugins_2fdask_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsWorkerGroup_flyteidl_2fplugins_2fdask_2eproto}, { &scc_info_Resources_flyteidl_2fcore_2ftasks_2eproto.base,}}; void InitDefaults_flyteidl_2fplugins_2fdask_2eproto() { ::google::protobuf::internal::InitSCC(&scc_info_DaskJob_flyteidl_2fplugins_2fdask_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_JobPodSpec_flyteidl_2fplugins_2fdask_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_DaskCluster_flyteidl_2fplugins_2fdask_2eproto.base); + ::google::protobuf::internal::InitSCC(&scc_info_Scheduler_flyteidl_2fplugins_2fdask_2eproto.base); + ::google::protobuf::internal::InitSCC(&scc_info_WorkerGroup_flyteidl_2fplugins_2fdask_2eproto.base); } ::google::protobuf::Metadata file_level_metadata_flyteidl_2fplugins_2fdask_2eproto[3]; @@ -97,34 +97,34 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fplugins_2fdask_2eproto:: ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::DaskJob, jobpodspec_), - PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::DaskJob, cluster_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::DaskJob, scheduler_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::DaskJob, workers_), ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::JobPodSpec, _internal_metadata_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::Scheduler, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::JobPodSpec, image_), - PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::JobPodSpec, resources_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::Scheduler, image_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::Scheduler, resources_), ~0u, // no _has_bits_ - 
PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::DaskCluster, _internal_metadata_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::WorkerGroup, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::DaskCluster, image_), - PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::DaskCluster, nworkers_), - PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::DaskCluster, resources_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::WorkerGroup, number_of_workers_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::WorkerGroup, image_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::WorkerGroup, resources_), }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::plugins::DaskJob)}, - { 7, -1, sizeof(::flyteidl::plugins::JobPodSpec)}, - { 14, -1, sizeof(::flyteidl::plugins::DaskCluster)}, + { 7, -1, sizeof(::flyteidl::plugins::Scheduler)}, + { 14, -1, sizeof(::flyteidl::plugins::WorkerGroup)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { reinterpret_cast(&::flyteidl::plugins::_DaskJob_default_instance_), - reinterpret_cast(&::flyteidl::plugins::_JobPodSpec_default_instance_), - reinterpret_cast(&::flyteidl::plugins::_DaskCluster_default_instance_), + reinterpret_cast(&::flyteidl::plugins::_Scheduler_default_instance_), + reinterpret_cast(&::flyteidl::plugins::_WorkerGroup_default_instance_), }; ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_flyteidl_2fplugins_2fdask_2eproto = { @@ -135,21 +135,21 @@ ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_fl const char descriptor_table_protodef_flyteidl_2fplugins_2fdask_2eproto[] = "\n\033flyteidl/plugins/dask.proto\022\020flyteidl." - "plugins\032\031flyteidl/core/tasks.proto\"k\n\007Da" - "skJob\0220\n\njobPodSpec\030\001 \001(\0132\034.flyteidl.plu" - "gins.JobPodSpec\022.\n\007cluster\030\002 \001(\0132\035.flyte" - "idl.plugins.DaskCluster\"H\n\nJobPodSpec\022\r\n" - "\005image\030\001 \001(\t\022+\n\tresources\030\002 \001(\0132\030.flytei" - "dl.core.Resources\"[\n\013DaskCluster\022\r\n\005imag" - "e\030\001 \001(\t\022\020\n\010nWorkers\030\002 \001(\005\022+\n\tresources\030\003" - " \001(\0132\030.flyteidl.core.ResourcesB9Z7github" - ".com/flyteorg/flyteidl/gen/pb-go/flyteid" - "l/pluginsb\006proto3" + "plugins\032\031flyteidl/core/tasks.proto\"i\n\007Da" + "skJob\022.\n\tscheduler\030\001 \001(\0132\033.flyteidl.plug" + "ins.Scheduler\022.\n\007workers\030\002 \001(\0132\035.flyteid" + "l.plugins.WorkerGroup\"G\n\tScheduler\022\r\n\005im" + "age\030\001 \001(\t\022+\n\tresources\030\002 \001(\0132\030.flyteidl." 
+ "core.Resources\"d\n\013WorkerGroup\022\031\n\021number_" + "of_workers\030\001 \001(\r\022\r\n\005image\030\002 \001(\t\022+\n\tresou" + "rces\030\003 \001(\0132\030.flyteidl.core.ResourcesB9Z7" + "github.com/flyteorg/flyteidl/gen/pb-go/f" + "lyteidl/pluginsb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fplugins_2fdask_2eproto = { false, InitDefaults_flyteidl_2fplugins_2fdask_2eproto, descriptor_table_protodef_flyteidl_2fplugins_2fdask_2eproto, - "flyteidl/plugins/dask.proto", &assign_descriptors_table_flyteidl_2fplugins_2fdask_2eproto, 417, + "flyteidl/plugins/dask.proto", &assign_descriptors_table_flyteidl_2fplugins_2fdask_2eproto, 423, }; void AddDescriptors_flyteidl_2fplugins_2fdask_2eproto() { @@ -168,28 +168,28 @@ namespace plugins { // =================================================================== void DaskJob::InitAsDefaultInstance() { - ::flyteidl::plugins::_DaskJob_default_instance_._instance.get_mutable()->jobpodspec_ = const_cast< ::flyteidl::plugins::JobPodSpec*>( - ::flyteidl::plugins::JobPodSpec::internal_default_instance()); - ::flyteidl::plugins::_DaskJob_default_instance_._instance.get_mutable()->cluster_ = const_cast< ::flyteidl::plugins::DaskCluster*>( - ::flyteidl::plugins::DaskCluster::internal_default_instance()); + ::flyteidl::plugins::_DaskJob_default_instance_._instance.get_mutable()->scheduler_ = const_cast< ::flyteidl::plugins::Scheduler*>( + ::flyteidl::plugins::Scheduler::internal_default_instance()); + ::flyteidl::plugins::_DaskJob_default_instance_._instance.get_mutable()->workers_ = const_cast< ::flyteidl::plugins::WorkerGroup*>( + ::flyteidl::plugins::WorkerGroup::internal_default_instance()); } class DaskJob::HasBitSetters { public: - static const ::flyteidl::plugins::JobPodSpec& jobpodspec(const DaskJob* msg); - static const ::flyteidl::plugins::DaskCluster& cluster(const DaskJob* msg); + static const ::flyteidl::plugins::Scheduler& scheduler(const DaskJob* msg); + static const ::flyteidl::plugins::WorkerGroup& workers(const DaskJob* msg); }; -const ::flyteidl::plugins::JobPodSpec& -DaskJob::HasBitSetters::jobpodspec(const DaskJob* msg) { - return *msg->jobpodspec_; +const ::flyteidl::plugins::Scheduler& +DaskJob::HasBitSetters::scheduler(const DaskJob* msg) { + return *msg->scheduler_; } -const ::flyteidl::plugins::DaskCluster& -DaskJob::HasBitSetters::cluster(const DaskJob* msg) { - return *msg->cluster_; +const ::flyteidl::plugins::WorkerGroup& +DaskJob::HasBitSetters::workers(const DaskJob* msg) { + return *msg->workers_; } #if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int DaskJob::kJobPodSpecFieldNumber; -const int DaskJob::kClusterFieldNumber; +const int DaskJob::kSchedulerFieldNumber; +const int DaskJob::kWorkersFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 DaskJob::DaskJob() @@ -201,15 +201,15 @@ DaskJob::DaskJob(const DaskJob& from) : ::google::protobuf::Message(), _internal_metadata_(nullptr) { _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_jobpodspec()) { - jobpodspec_ = new ::flyteidl::plugins::JobPodSpec(*from.jobpodspec_); + if (from.has_scheduler()) { + scheduler_ = new ::flyteidl::plugins::Scheduler(*from.scheduler_); } else { - jobpodspec_ = nullptr; + scheduler_ = nullptr; } - if (from.has_cluster()) { - cluster_ = new ::flyteidl::plugins::DaskCluster(*from.cluster_); + if (from.has_workers()) { + workers_ = new ::flyteidl::plugins::WorkerGroup(*from.workers_); } else { - cluster_ = nullptr; + workers_ = nullptr; } // 
@@protoc_insertion_point(copy_constructor:flyteidl.plugins.DaskJob) } @@ -217,9 +217,9 @@ DaskJob::DaskJob(const DaskJob& from) void DaskJob::SharedCtor() { ::google::protobuf::internal::InitSCC( &scc_info_DaskJob_flyteidl_2fplugins_2fdask_2eproto.base); - ::memset(&jobpodspec_, 0, static_cast( - reinterpret_cast(&cluster_) - - reinterpret_cast(&jobpodspec_)) + sizeof(cluster_)); + ::memset(&scheduler_, 0, static_cast( + reinterpret_cast(&workers_) - + reinterpret_cast(&scheduler_)) + sizeof(workers_)); } DaskJob::~DaskJob() { @@ -228,8 +228,8 @@ DaskJob::~DaskJob() { } void DaskJob::SharedDtor() { - if (this != internal_default_instance()) delete jobpodspec_; - if (this != internal_default_instance()) delete cluster_; + if (this != internal_default_instance()) delete scheduler_; + if (this != internal_default_instance()) delete workers_; } void DaskJob::SetCachedSize(int size) const { @@ -247,14 +247,14 @@ void DaskJob::Clear() { // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; - if (GetArenaNoVirtual() == nullptr && jobpodspec_ != nullptr) { - delete jobpodspec_; + if (GetArenaNoVirtual() == nullptr && scheduler_ != nullptr) { + delete scheduler_; } - jobpodspec_ = nullptr; - if (GetArenaNoVirtual() == nullptr && cluster_ != nullptr) { - delete cluster_; + scheduler_ = nullptr; + if (GetArenaNoVirtual() == nullptr && workers_ != nullptr) { + delete workers_; } - cluster_ = nullptr; + workers_ = nullptr; _internal_metadata_.Clear(); } @@ -271,26 +271,26 @@ const char* DaskJob::_InternalParse(const char* begin, const char* end, void* ob ptr = ::google::protobuf::io::Parse32(ptr, &tag); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); switch (tag >> 3) { - // .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + // .flyteidl.plugins.Scheduler scheduler = 1; case 1: { if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::plugins::JobPodSpec::_InternalParse; - object = msg->mutable_jobpodspec(); + parser_till_end = ::flyteidl::plugins::Scheduler::_InternalParse; + object = msg->mutable_scheduler(); if (size > end - ptr) goto len_delim_till_end; ptr += size; GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( {parser_till_end, object}, ptr - size, ptr)); break; } - // .flyteidl.plugins.DaskCluster cluster = 2; + // .flyteidl.plugins.WorkerGroup workers = 2; case 2: { if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::plugins::DaskCluster::_InternalParse; - object = msg->mutable_cluster(); + parser_till_end = ::flyteidl::plugins::WorkerGroup::_InternalParse; + object = msg->mutable_workers(); if (size > end - ptr) goto len_delim_till_end; ptr += size; GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( @@ -327,22 +327,22 @@ bool DaskJob::MergePartialFromCodedStream( tag = p.first; if (!p.second) goto handle_unusual; switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + // .flyteidl.plugins.Scheduler scheduler = 1; case 1: { if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_jobpodspec())); + input, mutable_scheduler())); } else { goto handle_unusual; } break; } - // .flyteidl.plugins.DaskCluster cluster = 2; + // 
.flyteidl.plugins.WorkerGroup workers = 2; case 2: { if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_cluster())); + input, mutable_workers())); } else { goto handle_unusual; } @@ -376,16 +376,16 @@ void DaskJob::SerializeWithCachedSizes( ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; - // .flyteidl.plugins.JobPodSpec jobPodSpec = 1; - if (this->has_jobpodspec()) { + // .flyteidl.plugins.Scheduler scheduler = 1; + if (this->has_scheduler()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::jobpodspec(this), output); + 1, HasBitSetters::scheduler(this), output); } - // .flyteidl.plugins.DaskCluster cluster = 2; - if (this->has_cluster()) { + // .flyteidl.plugins.WorkerGroup workers = 2; + if (this->has_workers()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 2, HasBitSetters::cluster(this), output); + 2, HasBitSetters::workers(this), output); } if (_internal_metadata_.have_unknown_fields()) { @@ -401,18 +401,18 @@ ::google::protobuf::uint8* DaskJob::InternalSerializeWithCachedSizesToArray( ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; - // .flyteidl.plugins.JobPodSpec jobPodSpec = 1; - if (this->has_jobpodspec()) { + // .flyteidl.plugins.Scheduler scheduler = 1; + if (this->has_scheduler()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( - 1, HasBitSetters::jobpodspec(this), target); + 1, HasBitSetters::scheduler(this), target); } - // .flyteidl.plugins.DaskCluster cluster = 2; - if (this->has_cluster()) { + // .flyteidl.plugins.WorkerGroup workers = 2; + if (this->has_workers()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( - 2, HasBitSetters::cluster(this), target); + 2, HasBitSetters::workers(this), target); } if (_internal_metadata_.have_unknown_fields()) { @@ -436,18 +436,18 @@ size_t DaskJob::ByteSizeLong() const { // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; - // .flyteidl.plugins.JobPodSpec jobPodSpec = 1; - if (this->has_jobpodspec()) { + // .flyteidl.plugins.Scheduler scheduler = 1; + if (this->has_scheduler()) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( - *jobpodspec_); + *scheduler_); } - // .flyteidl.plugins.DaskCluster cluster = 2; - if (this->has_cluster()) { + // .flyteidl.plugins.WorkerGroup workers = 2; + if (this->has_workers()) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( - *cluster_); + *workers_); } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); @@ -477,11 +477,11 @@ void DaskJob::MergeFrom(const DaskJob& from) { ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; - if (from.has_jobpodspec()) { - mutable_jobpodspec()->::flyteidl::plugins::JobPodSpec::MergeFrom(from.jobpodspec()); + if (from.has_scheduler()) { + mutable_scheduler()->::flyteidl::plugins::Scheduler::MergeFrom(from.scheduler()); } - if (from.has_cluster()) { - mutable_cluster()->::flyteidl::plugins::DaskCluster::MergeFrom(from.cluster()); + if (from.has_workers()) { + mutable_workers()->::flyteidl::plugins::WorkerGroup::MergeFrom(from.workers()); } } @@ -510,8 +510,8 @@ void DaskJob::Swap(DaskJob* other) { void DaskJob::InternalSwap(DaskJob* other) { using std::swap; _internal_metadata_.Swap(&other->_internal_metadata_); - swap(jobpodspec_, 
other->jobpodspec_); - swap(cluster_, other->cluster_); + swap(scheduler_, other->scheduler_); + swap(workers_, other->workers_); } ::google::protobuf::Metadata DaskJob::GetMetadata() const { @@ -522,36 +522,36 @@ ::google::protobuf::Metadata DaskJob::GetMetadata() const { // =================================================================== -void JobPodSpec::InitAsDefaultInstance() { - ::flyteidl::plugins::_JobPodSpec_default_instance_._instance.get_mutable()->resources_ = const_cast< ::flyteidl::core::Resources*>( +void Scheduler::InitAsDefaultInstance() { + ::flyteidl::plugins::_Scheduler_default_instance_._instance.get_mutable()->resources_ = const_cast< ::flyteidl::core::Resources*>( ::flyteidl::core::Resources::internal_default_instance()); } -class JobPodSpec::HasBitSetters { +class Scheduler::HasBitSetters { public: - static const ::flyteidl::core::Resources& resources(const JobPodSpec* msg); + static const ::flyteidl::core::Resources& resources(const Scheduler* msg); }; const ::flyteidl::core::Resources& -JobPodSpec::HasBitSetters::resources(const JobPodSpec* msg) { +Scheduler::HasBitSetters::resources(const Scheduler* msg) { return *msg->resources_; } -void JobPodSpec::clear_resources() { +void Scheduler::clear_resources() { if (GetArenaNoVirtual() == nullptr && resources_ != nullptr) { delete resources_; } resources_ = nullptr; } #if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int JobPodSpec::kImageFieldNumber; -const int JobPodSpec::kResourcesFieldNumber; +const int Scheduler::kImageFieldNumber; +const int Scheduler::kResourcesFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 -JobPodSpec::JobPodSpec() +Scheduler::Scheduler() : ::google::protobuf::Message(), _internal_metadata_(nullptr) { SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.plugins.JobPodSpec) + // @@protoc_insertion_point(constructor:flyteidl.plugins.Scheduler) } -JobPodSpec::JobPodSpec(const JobPodSpec& from) +Scheduler::Scheduler(const Scheduler& from) : ::google::protobuf::Message(), _internal_metadata_(nullptr) { _internal_metadata_.MergeFrom(from._internal_metadata_); @@ -564,37 +564,37 @@ JobPodSpec::JobPodSpec(const JobPodSpec& from) } else { resources_ = nullptr; } - // @@protoc_insertion_point(copy_constructor:flyteidl.plugins.JobPodSpec) + // @@protoc_insertion_point(copy_constructor:flyteidl.plugins.Scheduler) } -void JobPodSpec::SharedCtor() { +void Scheduler::SharedCtor() { ::google::protobuf::internal::InitSCC( - &scc_info_JobPodSpec_flyteidl_2fplugins_2fdask_2eproto.base); + &scc_info_Scheduler_flyteidl_2fplugins_2fdask_2eproto.base); image_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resources_ = nullptr; } -JobPodSpec::~JobPodSpec() { - // @@protoc_insertion_point(destructor:flyteidl.plugins.JobPodSpec) +Scheduler::~Scheduler() { + // @@protoc_insertion_point(destructor:flyteidl.plugins.Scheduler) SharedDtor(); } -void JobPodSpec::SharedDtor() { +void Scheduler::SharedDtor() { image_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete resources_; } -void JobPodSpec::SetCachedSize(int size) const { +void Scheduler::SetCachedSize(int size) const { _cached_size_.Set(size); } -const JobPodSpec& JobPodSpec::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_JobPodSpec_flyteidl_2fplugins_2fdask_2eproto.base); +const Scheduler& Scheduler::default_instance() { + 
::google::protobuf::internal::InitSCC(&::scc_info_Scheduler_flyteidl_2fplugins_2fdask_2eproto.base); return *internal_default_instance(); } -void JobPodSpec::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.plugins.JobPodSpec) +void Scheduler::Clear() { +// @@protoc_insertion_point(message_clear_start:flyteidl.plugins.Scheduler) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; @@ -608,9 +608,9 @@ void JobPodSpec::Clear() { } #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* JobPodSpec::_InternalParse(const char* begin, const char* end, void* object, +const char* Scheduler::_InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); + auto msg = static_cast(object); ::google::protobuf::int32 size; (void)size; int depth; (void)depth; ::google::protobuf::uint32 tag; @@ -625,7 +625,7 @@ const char* JobPodSpec::_InternalParse(const char* begin, const char* end, void* if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.plugins.JobPodSpec.image"); + ctx->extra_parse_data().SetFieldName("flyteidl.plugins.Scheduler.image"); object = msg->mutable_image(); if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; @@ -673,11 +673,11 @@ const char* JobPodSpec::_InternalParse(const char* begin, const char* end, void* {parser_till_end, object}, size); } #else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool JobPodSpec::MergePartialFromCodedStream( +bool Scheduler::MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) { #define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.plugins.JobPodSpec) + // @@protoc_insertion_point(parse_start:flyteidl.plugins.Scheduler) for (;;) { ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); tag = p.first; @@ -691,7 +691,7 @@ bool JobPodSpec::MergePartialFromCodedStream( DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->image().data(), static_cast(this->image().length()), ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.plugins.JobPodSpec.image")); + "flyteidl.plugins.Scheduler.image")); } else { goto handle_unusual; } @@ -721,18 +721,18 @@ bool JobPodSpec::MergePartialFromCodedStream( } } success: - // @@protoc_insertion_point(parse_success:flyteidl.plugins.JobPodSpec) + // @@protoc_insertion_point(parse_success:flyteidl.plugins.Scheduler) return true; failure: - // @@protoc_insertion_point(parse_failure:flyteidl.plugins.JobPodSpec) + // @@protoc_insertion_point(parse_failure:flyteidl.plugins.Scheduler) return false; #undef DO_ } #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -void JobPodSpec::SerializeWithCachedSizes( +void Scheduler::SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.plugins.JobPodSpec) + // @@protoc_insertion_point(serialize_start:flyteidl.plugins.Scheduler) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; @@ -741,7 +741,7 @@ void JobPodSpec::SerializeWithCachedSizes( 
::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->image().data(), static_cast(this->image().length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.plugins.JobPodSpec.image"); + "flyteidl.plugins.Scheduler.image"); ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( 1, this->image(), output); } @@ -756,12 +756,12 @@ void JobPodSpec::SerializeWithCachedSizes( ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); } - // @@protoc_insertion_point(serialize_end:flyteidl.plugins.JobPodSpec) + // @@protoc_insertion_point(serialize_end:flyteidl.plugins.Scheduler) } -::google::protobuf::uint8* JobPodSpec::InternalSerializeWithCachedSizesToArray( +::google::protobuf::uint8* Scheduler::InternalSerializeWithCachedSizesToArray( ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.plugins.JobPodSpec) + // @@protoc_insertion_point(serialize_to_array_start:flyteidl.plugins.Scheduler) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; @@ -770,7 +770,7 @@ ::google::protobuf::uint8* JobPodSpec::InternalSerializeWithCachedSizesToArray( ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->image().data(), static_cast(this->image().length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.plugins.JobPodSpec.image"); + "flyteidl.plugins.Scheduler.image"); target = ::google::protobuf::internal::WireFormatLite::WriteStringToArray( 1, this->image(), target); @@ -787,12 +787,12 @@ ::google::protobuf::uint8* JobPodSpec::InternalSerializeWithCachedSizesToArray( target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.plugins.JobPodSpec) + // @@protoc_insertion_point(serialize_to_array_end:flyteidl.plugins.Scheduler) return target; } -size_t JobPodSpec::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.plugins.JobPodSpec) +size_t Scheduler::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:flyteidl.plugins.Scheduler) size_t total_size = 0; if (_internal_metadata_.have_unknown_fields()) { @@ -823,23 +823,23 @@ size_t JobPodSpec::ByteSizeLong() const { return total_size; } -void JobPodSpec::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.plugins.JobPodSpec) +void Scheduler::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.plugins.Scheduler) GOOGLE_DCHECK_NE(&from, this); - const JobPodSpec* source = - ::google::protobuf::DynamicCastToGenerated( + const Scheduler* source = + ::google::protobuf::DynamicCastToGenerated( &from); if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.plugins.JobPodSpec) + // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.plugins.Scheduler) ::google::protobuf::internal::ReflectionOps::Merge(from, this); } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.plugins.JobPodSpec) + // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.plugins.Scheduler) MergeFrom(*source); } } -void JobPodSpec::MergeFrom(const JobPodSpec& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.plugins.JobPodSpec) +void 
Scheduler::MergeFrom(const Scheduler& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.plugins.Scheduler) GOOGLE_DCHECK_NE(&from, this); _internal_metadata_.MergeFrom(from._internal_metadata_); ::google::protobuf::uint32 cached_has_bits = 0; @@ -854,29 +854,29 @@ void JobPodSpec::MergeFrom(const JobPodSpec& from) { } } -void JobPodSpec::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.plugins.JobPodSpec) +void Scheduler::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.plugins.Scheduler) if (&from == this) return; Clear(); MergeFrom(from); } -void JobPodSpec::CopyFrom(const JobPodSpec& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.plugins.JobPodSpec) +void Scheduler::CopyFrom(const Scheduler& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.plugins.Scheduler) if (&from == this) return; Clear(); MergeFrom(from); } -bool JobPodSpec::IsInitialized() const { +bool Scheduler::IsInitialized() const { return true; } -void JobPodSpec::Swap(JobPodSpec* other) { +void Scheduler::Swap(Scheduler* other) { if (other == this) return; InternalSwap(other); } -void JobPodSpec::InternalSwap(JobPodSpec* other) { +void Scheduler::InternalSwap(Scheduler* other) { using std::swap; _internal_metadata_.Swap(&other->_internal_metadata_); image_.Swap(&other->image_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), @@ -884,7 +884,7 @@ void JobPodSpec::InternalSwap(JobPodSpec* other) { swap(resources_, other->resources_); } -::google::protobuf::Metadata JobPodSpec::GetMetadata() const { +::google::protobuf::Metadata Scheduler::GetMetadata() const { ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fplugins_2fdask_2eproto); return ::file_level_metadata_flyteidl_2fplugins_2fdask_2eproto[kIndexInFileMessages]; } @@ -892,37 +892,37 @@ ::google::protobuf::Metadata JobPodSpec::GetMetadata() const { // =================================================================== -void DaskCluster::InitAsDefaultInstance() { - ::flyteidl::plugins::_DaskCluster_default_instance_._instance.get_mutable()->resources_ = const_cast< ::flyteidl::core::Resources*>( +void WorkerGroup::InitAsDefaultInstance() { + ::flyteidl::plugins::_WorkerGroup_default_instance_._instance.get_mutable()->resources_ = const_cast< ::flyteidl::core::Resources*>( ::flyteidl::core::Resources::internal_default_instance()); } -class DaskCluster::HasBitSetters { +class WorkerGroup::HasBitSetters { public: - static const ::flyteidl::core::Resources& resources(const DaskCluster* msg); + static const ::flyteidl::core::Resources& resources(const WorkerGroup* msg); }; const ::flyteidl::core::Resources& -DaskCluster::HasBitSetters::resources(const DaskCluster* msg) { +WorkerGroup::HasBitSetters::resources(const WorkerGroup* msg) { return *msg->resources_; } -void DaskCluster::clear_resources() { +void WorkerGroup::clear_resources() { if (GetArenaNoVirtual() == nullptr && resources_ != nullptr) { delete resources_; } resources_ = nullptr; } #if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int DaskCluster::kImageFieldNumber; -const int DaskCluster::kNWorkersFieldNumber; -const int DaskCluster::kResourcesFieldNumber; +const int WorkerGroup::kNumberOfWorkersFieldNumber; +const int WorkerGroup::kImageFieldNumber; +const int WorkerGroup::kResourcesFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 
-DaskCluster::DaskCluster() +WorkerGroup::WorkerGroup() : ::google::protobuf::Message(), _internal_metadata_(nullptr) { SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.plugins.DaskCluster) + // @@protoc_insertion_point(constructor:flyteidl.plugins.WorkerGroup) } -DaskCluster::DaskCluster(const DaskCluster& from) +WorkerGroup::WorkerGroup(const WorkerGroup& from) : ::google::protobuf::Message(), _internal_metadata_(nullptr) { _internal_metadata_.MergeFrom(from._internal_metadata_); @@ -935,40 +935,40 @@ DaskCluster::DaskCluster(const DaskCluster& from) } else { resources_ = nullptr; } - nworkers_ = from.nworkers_; - // @@protoc_insertion_point(copy_constructor:flyteidl.plugins.DaskCluster) + number_of_workers_ = from.number_of_workers_; + // @@protoc_insertion_point(copy_constructor:flyteidl.plugins.WorkerGroup) } -void DaskCluster::SharedCtor() { +void WorkerGroup::SharedCtor() { ::google::protobuf::internal::InitSCC( - &scc_info_DaskCluster_flyteidl_2fplugins_2fdask_2eproto.base); + &scc_info_WorkerGroup_flyteidl_2fplugins_2fdask_2eproto.base); image_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); ::memset(&resources_, 0, static_cast( - reinterpret_cast(&nworkers_) - - reinterpret_cast(&resources_)) + sizeof(nworkers_)); + reinterpret_cast(&number_of_workers_) - + reinterpret_cast(&resources_)) + sizeof(number_of_workers_)); } -DaskCluster::~DaskCluster() { - // @@protoc_insertion_point(destructor:flyteidl.plugins.DaskCluster) +WorkerGroup::~WorkerGroup() { + // @@protoc_insertion_point(destructor:flyteidl.plugins.WorkerGroup) SharedDtor(); } -void DaskCluster::SharedDtor() { +void WorkerGroup::SharedDtor() { image_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete resources_; } -void DaskCluster::SetCachedSize(int size) const { +void WorkerGroup::SetCachedSize(int size) const { _cached_size_.Set(size); } -const DaskCluster& DaskCluster::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_DaskCluster_flyteidl_2fplugins_2fdask_2eproto.base); +const WorkerGroup& WorkerGroup::default_instance() { + ::google::protobuf::internal::InitSCC(&::scc_info_WorkerGroup_flyteidl_2fplugins_2fdask_2eproto.base); return *internal_default_instance(); } -void DaskCluster::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.plugins.DaskCluster) +void WorkerGroup::Clear() { +// @@protoc_insertion_point(message_clear_start:flyteidl.plugins.WorkerGroup) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; @@ -978,14 +978,14 @@ void DaskCluster::Clear() { delete resources_; } resources_ = nullptr; - nworkers_ = 0; + number_of_workers_ = 0u; _internal_metadata_.Clear(); } #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* DaskCluster::_InternalParse(const char* begin, const char* end, void* object, +const char* WorkerGroup::_InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); + auto msg = static_cast(object); ::google::protobuf::int32 size; (void)size; int depth; (void)depth; ::google::protobuf::uint32 tag; @@ -995,12 +995,19 @@ const char* DaskCluster::_InternalParse(const char* begin, const char* end, void ptr = ::google::protobuf::io::Parse32(ptr, &tag); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); switch (tag >> 3) { - // string image = 1; + // uint32 number_of_workers = 
1; case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; + if (static_cast<::google::protobuf::uint8>(tag) != 8) goto handle_unusual; + msg->set_number_of_workers(::google::protobuf::internal::ReadVarint(&ptr)); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + break; + } + // string image = 2; + case 2: { + if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.plugins.DaskCluster.image"); + ctx->extra_parse_data().SetFieldName("flyteidl.plugins.WorkerGroup.image"); object = msg->mutable_image(); if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; @@ -1011,13 +1018,6 @@ const char* DaskCluster::_InternalParse(const char* begin, const char* end, void ptr += size; break; } - // int32 nWorkers = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 16) goto handle_unusual; - msg->set_nworkers(::google::protobuf::internal::ReadVarint(&ptr)); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - break; - } // .flyteidl.core.Resources resources = 3; case 3: { if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; @@ -1055,38 +1055,38 @@ const char* DaskCluster::_InternalParse(const char* begin, const char* end, void {parser_till_end, object}, size); } #else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool DaskCluster::MergePartialFromCodedStream( +bool WorkerGroup::MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) { #define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.plugins.DaskCluster) + // @@protoc_insertion_point(parse_start:flyteidl.plugins.WorkerGroup) for (;;) { ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); tag = p.first; if (!p.second) goto handle_unusual; switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // string image = 1; + // uint32 number_of_workers = 1; case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_image())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->image().data(), static_cast(this->image().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.plugins.DaskCluster.image")); + if (static_cast< ::google::protobuf::uint8>(tag) == (8 & 0xFF)) { + + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + ::google::protobuf::uint32, ::google::protobuf::internal::WireFormatLite::TYPE_UINT32>( + input, &number_of_workers_))); } else { goto handle_unusual; } break; } - // int32 nWorkers = 2; + // string image = 2; case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (16 & 0xFF)) { - - DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< - ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>( - input, &nworkers_))); + if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_image())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->image().data(), static_cast(this->image().length()), + 
::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.plugins.WorkerGroup.image")); } else { goto handle_unusual; } @@ -1116,34 +1116,34 @@ bool DaskCluster::MergePartialFromCodedStream( } } success: - // @@protoc_insertion_point(parse_success:flyteidl.plugins.DaskCluster) + // @@protoc_insertion_point(parse_success:flyteidl.plugins.WorkerGroup) return true; failure: - // @@protoc_insertion_point(parse_failure:flyteidl.plugins.DaskCluster) + // @@protoc_insertion_point(parse_failure:flyteidl.plugins.WorkerGroup) return false; #undef DO_ } #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -void DaskCluster::SerializeWithCachedSizes( +void WorkerGroup::SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.plugins.DaskCluster) + // @@protoc_insertion_point(serialize_start:flyteidl.plugins.WorkerGroup) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; - // string image = 1; + // uint32 number_of_workers = 1; + if (this->number_of_workers() != 0) { + ::google::protobuf::internal::WireFormatLite::WriteUInt32(1, this->number_of_workers(), output); + } + + // string image = 2; if (this->image().size() > 0) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->image().data(), static_cast(this->image().length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.plugins.DaskCluster.image"); + "flyteidl.plugins.WorkerGroup.image"); ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 1, this->image(), output); - } - - // int32 nWorkers = 2; - if (this->nworkers() != 0) { - ::google::protobuf::internal::WireFormatLite::WriteInt32(2, this->nworkers(), output); + 2, this->image(), output); } // .flyteidl.core.Resources resources = 3; @@ -1156,29 +1156,29 @@ void DaskCluster::SerializeWithCachedSizes( ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); } - // @@protoc_insertion_point(serialize_end:flyteidl.plugins.DaskCluster) + // @@protoc_insertion_point(serialize_end:flyteidl.plugins.WorkerGroup) } -::google::protobuf::uint8* DaskCluster::InternalSerializeWithCachedSizesToArray( +::google::protobuf::uint8* WorkerGroup::InternalSerializeWithCachedSizesToArray( ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.plugins.DaskCluster) + // @@protoc_insertion_point(serialize_to_array_start:flyteidl.plugins.WorkerGroup) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; - // string image = 1; + // uint32 number_of_workers = 1; + if (this->number_of_workers() != 0) { + target = ::google::protobuf::internal::WireFormatLite::WriteUInt32ToArray(1, this->number_of_workers(), target); + } + + // string image = 2; if (this->image().size() > 0) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->image().data(), static_cast(this->image().length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.plugins.DaskCluster.image"); + "flyteidl.plugins.WorkerGroup.image"); target = ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 1, this->image(), target); - } - - // int32 nWorkers = 2; - if (this->nworkers() != 0) { - target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(2, this->nworkers(), target); + 2, this->image(), target); } // .flyteidl.core.Resources resources = 3; @@ -1192,12 +1192,12 @@ ::google::protobuf::uint8* 
DaskCluster::InternalSerializeWithCachedSizesToArray( target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.plugins.DaskCluster) + // @@protoc_insertion_point(serialize_to_array_end:flyteidl.plugins.WorkerGroup) return target; } -size_t DaskCluster::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.plugins.DaskCluster) +size_t WorkerGroup::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:flyteidl.plugins.WorkerGroup) size_t total_size = 0; if (_internal_metadata_.have_unknown_fields()) { @@ -1209,7 +1209,7 @@ size_t DaskCluster::ByteSizeLong() const { // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; - // string image = 1; + // string image = 2; if (this->image().size() > 0) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize( @@ -1223,11 +1223,11 @@ size_t DaskCluster::ByteSizeLong() const { *resources_); } - // int32 nWorkers = 2; - if (this->nworkers() != 0) { + // uint32 number_of_workers = 1; + if (this->number_of_workers() != 0) { total_size += 1 + - ::google::protobuf::internal::WireFormatLite::Int32Size( - this->nworkers()); + ::google::protobuf::internal::WireFormatLite::UInt32Size( + this->number_of_workers()); } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); @@ -1235,23 +1235,23 @@ size_t DaskCluster::ByteSizeLong() const { return total_size; } -void DaskCluster::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.plugins.DaskCluster) +void WorkerGroup::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.plugins.WorkerGroup) GOOGLE_DCHECK_NE(&from, this); - const DaskCluster* source = - ::google::protobuf::DynamicCastToGenerated( + const WorkerGroup* source = + ::google::protobuf::DynamicCastToGenerated( &from); if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.plugins.DaskCluster) + // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.plugins.WorkerGroup) ::google::protobuf::internal::ReflectionOps::Merge(from, this); } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.plugins.DaskCluster) + // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.plugins.WorkerGroup) MergeFrom(*source); } } -void DaskCluster::MergeFrom(const DaskCluster& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.plugins.DaskCluster) +void WorkerGroup::MergeFrom(const WorkerGroup& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.plugins.WorkerGroup) GOOGLE_DCHECK_NE(&from, this); _internal_metadata_.MergeFrom(from._internal_metadata_); ::google::protobuf::uint32 cached_has_bits = 0; @@ -1264,43 +1264,43 @@ void DaskCluster::MergeFrom(const DaskCluster& from) { if (from.has_resources()) { mutable_resources()->::flyteidl::core::Resources::MergeFrom(from.resources()); } - if (from.nworkers() != 0) { - set_nworkers(from.nworkers()); + if (from.number_of_workers() != 0) { + set_number_of_workers(from.number_of_workers()); } } -void DaskCluster::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.plugins.DaskCluster) +void WorkerGroup::CopyFrom(const 
::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.plugins.WorkerGroup) if (&from == this) return; Clear(); MergeFrom(from); } -void DaskCluster::CopyFrom(const DaskCluster& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.plugins.DaskCluster) +void WorkerGroup::CopyFrom(const WorkerGroup& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.plugins.WorkerGroup) if (&from == this) return; Clear(); MergeFrom(from); } -bool DaskCluster::IsInitialized() const { +bool WorkerGroup::IsInitialized() const { return true; } -void DaskCluster::Swap(DaskCluster* other) { +void WorkerGroup::Swap(WorkerGroup* other) { if (other == this) return; InternalSwap(other); } -void DaskCluster::InternalSwap(DaskCluster* other) { +void WorkerGroup::InternalSwap(WorkerGroup* other) { using std::swap; _internal_metadata_.Swap(&other->_internal_metadata_); image_.Swap(&other->image_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); swap(resources_, other->resources_); - swap(nworkers_, other->nworkers_); + swap(number_of_workers_, other->number_of_workers_); } -::google::protobuf::Metadata DaskCluster::GetMetadata() const { +::google::protobuf::Metadata WorkerGroup::GetMetadata() const { ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fplugins_2fdask_2eproto); return ::file_level_metadata_flyteidl_2fplugins_2fdask_2eproto[kIndexInFileMessages]; } @@ -1314,11 +1314,11 @@ namespace protobuf { template<> PROTOBUF_NOINLINE ::flyteidl::plugins::DaskJob* Arena::CreateMaybeMessage< ::flyteidl::plugins::DaskJob >(Arena* arena) { return Arena::CreateInternal< ::flyteidl::plugins::DaskJob >(arena); } -template<> PROTOBUF_NOINLINE ::flyteidl::plugins::JobPodSpec* Arena::CreateMaybeMessage< ::flyteidl::plugins::JobPodSpec >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::plugins::JobPodSpec >(arena); +template<> PROTOBUF_NOINLINE ::flyteidl::plugins::Scheduler* Arena::CreateMaybeMessage< ::flyteidl::plugins::Scheduler >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::plugins::Scheduler >(arena); } -template<> PROTOBUF_NOINLINE ::flyteidl::plugins::DaskCluster* Arena::CreateMaybeMessage< ::flyteidl::plugins::DaskCluster >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::plugins::DaskCluster >(arena); +template<> PROTOBUF_NOINLINE ::flyteidl::plugins::WorkerGroup* Arena::CreateMaybeMessage< ::flyteidl::plugins::WorkerGroup >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::plugins::WorkerGroup >(arena); } } // namespace protobuf } // namespace google diff --git a/gen/pb-cpp/flyteidl/plugins/dask.pb.h b/gen/pb-cpp/flyteidl/plugins/dask.pb.h index 295ad1c50..db6e1b56c 100644 --- a/gen/pb-cpp/flyteidl/plugins/dask.pb.h +++ b/gen/pb-cpp/flyteidl/plugins/dask.pb.h @@ -51,22 +51,22 @@ struct TableStruct_flyteidl_2fplugins_2fdask_2eproto { void AddDescriptors_flyteidl_2fplugins_2fdask_2eproto(); namespace flyteidl { namespace plugins { -class DaskCluster; -class DaskClusterDefaultTypeInternal; -extern DaskClusterDefaultTypeInternal _DaskCluster_default_instance_; class DaskJob; class DaskJobDefaultTypeInternal; extern DaskJobDefaultTypeInternal _DaskJob_default_instance_; -class JobPodSpec; -class JobPodSpecDefaultTypeInternal; -extern JobPodSpecDefaultTypeInternal _JobPodSpec_default_instance_; +class Scheduler; +class SchedulerDefaultTypeInternal; +extern SchedulerDefaultTypeInternal _Scheduler_default_instance_; 
+class WorkerGroup; +class WorkerGroupDefaultTypeInternal; +extern WorkerGroupDefaultTypeInternal _WorkerGroup_default_instance_; } // namespace plugins } // namespace flyteidl namespace google { namespace protobuf { -template<> ::flyteidl::plugins::DaskCluster* Arena::CreateMaybeMessage<::flyteidl::plugins::DaskCluster>(Arena*); template<> ::flyteidl::plugins::DaskJob* Arena::CreateMaybeMessage<::flyteidl::plugins::DaskJob>(Arena*); -template<> ::flyteidl::plugins::JobPodSpec* Arena::CreateMaybeMessage<::flyteidl::plugins::JobPodSpec>(Arena*); +template<> ::flyteidl::plugins::Scheduler* Arena::CreateMaybeMessage<::flyteidl::plugins::Scheduler>(Arena*); +template<> ::flyteidl::plugins::WorkerGroup* Arena::CreateMaybeMessage<::flyteidl::plugins::WorkerGroup>(Arena*); } // namespace protobuf } // namespace google namespace flyteidl { @@ -169,55 +169,55 @@ class DaskJob final : // accessors ------------------------------------------------------- - // .flyteidl.plugins.JobPodSpec jobPodSpec = 1; - bool has_jobpodspec() const; - void clear_jobpodspec(); - static const int kJobPodSpecFieldNumber = 1; - const ::flyteidl::plugins::JobPodSpec& jobpodspec() const; - ::flyteidl::plugins::JobPodSpec* release_jobpodspec(); - ::flyteidl::plugins::JobPodSpec* mutable_jobpodspec(); - void set_allocated_jobpodspec(::flyteidl::plugins::JobPodSpec* jobpodspec); - - // .flyteidl.plugins.DaskCluster cluster = 2; - bool has_cluster() const; - void clear_cluster(); - static const int kClusterFieldNumber = 2; - const ::flyteidl::plugins::DaskCluster& cluster() const; - ::flyteidl::plugins::DaskCluster* release_cluster(); - ::flyteidl::plugins::DaskCluster* mutable_cluster(); - void set_allocated_cluster(::flyteidl::plugins::DaskCluster* cluster); + // .flyteidl.plugins.Scheduler scheduler = 1; + bool has_scheduler() const; + void clear_scheduler(); + static const int kSchedulerFieldNumber = 1; + const ::flyteidl::plugins::Scheduler& scheduler() const; + ::flyteidl::plugins::Scheduler* release_scheduler(); + ::flyteidl::plugins::Scheduler* mutable_scheduler(); + void set_allocated_scheduler(::flyteidl::plugins::Scheduler* scheduler); + + // .flyteidl.plugins.WorkerGroup workers = 2; + bool has_workers() const; + void clear_workers(); + static const int kWorkersFieldNumber = 2; + const ::flyteidl::plugins::WorkerGroup& workers() const; + ::flyteidl::plugins::WorkerGroup* release_workers(); + ::flyteidl::plugins::WorkerGroup* mutable_workers(); + void set_allocated_workers(::flyteidl::plugins::WorkerGroup* workers); // @@protoc_insertion_point(class_scope:flyteidl.plugins.DaskJob) private: class HasBitSetters; ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::flyteidl::plugins::JobPodSpec* jobpodspec_; - ::flyteidl::plugins::DaskCluster* cluster_; + ::flyteidl::plugins::Scheduler* scheduler_; + ::flyteidl::plugins::WorkerGroup* workers_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fplugins_2fdask_2eproto; }; // ------------------------------------------------------------------- -class JobPodSpec final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.plugins.JobPodSpec) */ { +class Scheduler final : + public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.plugins.Scheduler) */ { public: - JobPodSpec(); - virtual ~JobPodSpec(); + Scheduler(); + virtual ~Scheduler(); - JobPodSpec(const JobPodSpec& from); + Scheduler(const Scheduler& from); - inline 
JobPodSpec& operator=(const JobPodSpec& from) { + inline Scheduler& operator=(const Scheduler& from) { CopyFrom(from); return *this; } #if LANG_CXX11 - JobPodSpec(JobPodSpec&& from) noexcept - : JobPodSpec() { + Scheduler(Scheduler&& from) noexcept + : Scheduler() { *this = ::std::move(from); } - inline JobPodSpec& operator=(JobPodSpec&& from) noexcept { + inline Scheduler& operator=(Scheduler&& from) noexcept { if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { if (this != &from) InternalSwap(&from); } else { @@ -229,34 +229,34 @@ class JobPodSpec final : static const ::google::protobuf::Descriptor* descriptor() { return default_instance().GetDescriptor(); } - static const JobPodSpec& default_instance(); + static const Scheduler& default_instance(); static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const JobPodSpec* internal_default_instance() { - return reinterpret_cast( - &_JobPodSpec_default_instance_); + static inline const Scheduler* internal_default_instance() { + return reinterpret_cast( + &_Scheduler_default_instance_); } static constexpr int kIndexInFileMessages = 1; - void Swap(JobPodSpec* other); - friend void swap(JobPodSpec& a, JobPodSpec& b) { + void Swap(Scheduler* other); + friend void swap(Scheduler& a, Scheduler& b) { a.Swap(&b); } // implements Message ---------------------------------------------- - inline JobPodSpec* New() const final { - return CreateMaybeMessage(nullptr); + inline Scheduler* New() const final { + return CreateMaybeMessage(nullptr); } - JobPodSpec* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); + Scheduler* New(::google::protobuf::Arena* arena) const final { + return CreateMaybeMessage(arena); } void CopyFrom(const ::google::protobuf::Message& from) final; void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const JobPodSpec& from); - void MergeFrom(const JobPodSpec& from); + void CopyFrom(const Scheduler& from); + void MergeFrom(const Scheduler& from); PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; bool IsInitialized() const final; @@ -278,7 +278,7 @@ class JobPodSpec final : void SharedCtor(); void SharedDtor(); void SetCachedSize(int size) const final; - void InternalSwap(JobPodSpec* other); + void InternalSwap(Scheduler* other); private: inline ::google::protobuf::Arena* GetArenaNoVirtual() const { return nullptr; @@ -317,7 +317,7 @@ class JobPodSpec final : ::flyteidl::core::Resources* mutable_resources(); void set_allocated_resources(::flyteidl::core::Resources* resources); - // @@protoc_insertion_point(class_scope:flyteidl.plugins.JobPodSpec) + // @@protoc_insertion_point(class_scope:flyteidl.plugins.Scheduler) private: class HasBitSetters; @@ -329,25 +329,25 @@ class JobPodSpec final : }; // ------------------------------------------------------------------- -class DaskCluster final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.plugins.DaskCluster) */ { +class WorkerGroup final : + public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.plugins.WorkerGroup) */ { public: - DaskCluster(); - virtual ~DaskCluster(); + WorkerGroup(); + virtual ~WorkerGroup(); - DaskCluster(const DaskCluster& from); + WorkerGroup(const WorkerGroup& from); - inline DaskCluster& operator=(const DaskCluster& from) { + inline WorkerGroup& operator=(const WorkerGroup& from) { CopyFrom(from); return *this; } #if LANG_CXX11 - DaskCluster(DaskCluster&& from) noexcept - : DaskCluster() { + 
WorkerGroup(WorkerGroup&& from) noexcept + : WorkerGroup() { *this = ::std::move(from); } - inline DaskCluster& operator=(DaskCluster&& from) noexcept { + inline WorkerGroup& operator=(WorkerGroup&& from) noexcept { if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { if (this != &from) InternalSwap(&from); } else { @@ -359,34 +359,34 @@ class DaskCluster final : static const ::google::protobuf::Descriptor* descriptor() { return default_instance().GetDescriptor(); } - static const DaskCluster& default_instance(); + static const WorkerGroup& default_instance(); static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const DaskCluster* internal_default_instance() { - return reinterpret_cast( - &_DaskCluster_default_instance_); + static inline const WorkerGroup* internal_default_instance() { + return reinterpret_cast( + &_WorkerGroup_default_instance_); } static constexpr int kIndexInFileMessages = 2; - void Swap(DaskCluster* other); - friend void swap(DaskCluster& a, DaskCluster& b) { + void Swap(WorkerGroup* other); + friend void swap(WorkerGroup& a, WorkerGroup& b) { a.Swap(&b); } // implements Message ---------------------------------------------- - inline DaskCluster* New() const final { - return CreateMaybeMessage(nullptr); + inline WorkerGroup* New() const final { + return CreateMaybeMessage(nullptr); } - DaskCluster* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); + WorkerGroup* New(::google::protobuf::Arena* arena) const final { + return CreateMaybeMessage(arena); } void CopyFrom(const ::google::protobuf::Message& from) final; void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const DaskCluster& from); - void MergeFrom(const DaskCluster& from); + void CopyFrom(const WorkerGroup& from); + void MergeFrom(const WorkerGroup& from); PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; bool IsInitialized() const final; @@ -408,7 +408,7 @@ class DaskCluster final : void SharedCtor(); void SharedDtor(); void SetCachedSize(int size) const final; - void InternalSwap(DaskCluster* other); + void InternalSwap(WorkerGroup* other); private: inline ::google::protobuf::Arena* GetArenaNoVirtual() const { return nullptr; @@ -424,9 +424,9 @@ class DaskCluster final : // accessors ------------------------------------------------------- - // string image = 1; + // string image = 2; void clear_image(); - static const int kImageFieldNumber = 1; + static const int kImageFieldNumber = 2; const ::std::string& image() const; void set_image(const ::std::string& value); #if LANG_CXX11 @@ -447,20 +447,20 @@ class DaskCluster final : ::flyteidl::core::Resources* mutable_resources(); void set_allocated_resources(::flyteidl::core::Resources* resources); - // int32 nWorkers = 2; - void clear_nworkers(); - static const int kNWorkersFieldNumber = 2; - ::google::protobuf::int32 nworkers() const; - void set_nworkers(::google::protobuf::int32 value); + // uint32 number_of_workers = 1; + void clear_number_of_workers(); + static const int kNumberOfWorkersFieldNumber = 1; + ::google::protobuf::uint32 number_of_workers() const; + void set_number_of_workers(::google::protobuf::uint32 value); - // @@protoc_insertion_point(class_scope:flyteidl.plugins.DaskCluster) + // @@protoc_insertion_point(class_scope:flyteidl.plugins.WorkerGroup) private: class HasBitSetters; ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::ArenaStringPtr image_; ::flyteidl::core::Resources* resources_; - 
::google::protobuf::int32 nworkers_; + ::google::protobuf::uint32 number_of_workers_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fplugins_2fdask_2eproto; }; @@ -475,192 +475,192 @@ class DaskCluster final : #endif // __GNUC__ // DaskJob -// .flyteidl.plugins.JobPodSpec jobPodSpec = 1; -inline bool DaskJob::has_jobpodspec() const { - return this != internal_default_instance() && jobpodspec_ != nullptr; +// .flyteidl.plugins.Scheduler scheduler = 1; +inline bool DaskJob::has_scheduler() const { + return this != internal_default_instance() && scheduler_ != nullptr; } -inline void DaskJob::clear_jobpodspec() { - if (GetArenaNoVirtual() == nullptr && jobpodspec_ != nullptr) { - delete jobpodspec_; +inline void DaskJob::clear_scheduler() { + if (GetArenaNoVirtual() == nullptr && scheduler_ != nullptr) { + delete scheduler_; } - jobpodspec_ = nullptr; + scheduler_ = nullptr; } -inline const ::flyteidl::plugins::JobPodSpec& DaskJob::jobpodspec() const { - const ::flyteidl::plugins::JobPodSpec* p = jobpodspec_; - // @@protoc_insertion_point(field_get:flyteidl.plugins.DaskJob.jobPodSpec) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::plugins::_JobPodSpec_default_instance_); +inline const ::flyteidl::plugins::Scheduler& DaskJob::scheduler() const { + const ::flyteidl::plugins::Scheduler* p = scheduler_; + // @@protoc_insertion_point(field_get:flyteidl.plugins.DaskJob.scheduler) + return p != nullptr ? *p : *reinterpret_cast( + &::flyteidl::plugins::_Scheduler_default_instance_); } -inline ::flyteidl::plugins::JobPodSpec* DaskJob::release_jobpodspec() { - // @@protoc_insertion_point(field_release:flyteidl.plugins.DaskJob.jobPodSpec) +inline ::flyteidl::plugins::Scheduler* DaskJob::release_scheduler() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.DaskJob.scheduler) - ::flyteidl::plugins::JobPodSpec* temp = jobpodspec_; - jobpodspec_ = nullptr; + ::flyteidl::plugins::Scheduler* temp = scheduler_; + scheduler_ = nullptr; return temp; } -inline ::flyteidl::plugins::JobPodSpec* DaskJob::mutable_jobpodspec() { +inline ::flyteidl::plugins::Scheduler* DaskJob::mutable_scheduler() { - if (jobpodspec_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::plugins::JobPodSpec>(GetArenaNoVirtual()); - jobpodspec_ = p; + if (scheduler_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::plugins::Scheduler>(GetArenaNoVirtual()); + scheduler_ = p; } - // @@protoc_insertion_point(field_mutable:flyteidl.plugins.DaskJob.jobPodSpec) - return jobpodspec_; + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.DaskJob.scheduler) + return scheduler_; } -inline void DaskJob::set_allocated_jobpodspec(::flyteidl::plugins::JobPodSpec* jobpodspec) { +inline void DaskJob::set_allocated_scheduler(::flyteidl::plugins::Scheduler* scheduler) { ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); if (message_arena == nullptr) { - delete jobpodspec_; + delete scheduler_; } - if (jobpodspec) { + if (scheduler) { ::google::protobuf::Arena* submessage_arena = nullptr; if (message_arena != submessage_arena) { - jobpodspec = ::google::protobuf::internal::GetOwnedMessage( - message_arena, jobpodspec, submessage_arena); + scheduler = ::google::protobuf::internal::GetOwnedMessage( + message_arena, scheduler, submessage_arena); } } else { } - jobpodspec_ = jobpodspec; - // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.DaskJob.jobPodSpec) + scheduler_ = scheduler; + // 
@@protoc_insertion_point(field_set_allocated:flyteidl.plugins.DaskJob.scheduler) } -// .flyteidl.plugins.DaskCluster cluster = 2; -inline bool DaskJob::has_cluster() const { - return this != internal_default_instance() && cluster_ != nullptr; +// .flyteidl.plugins.WorkerGroup workers = 2; +inline bool DaskJob::has_workers() const { + return this != internal_default_instance() && workers_ != nullptr; } -inline void DaskJob::clear_cluster() { - if (GetArenaNoVirtual() == nullptr && cluster_ != nullptr) { - delete cluster_; +inline void DaskJob::clear_workers() { + if (GetArenaNoVirtual() == nullptr && workers_ != nullptr) { + delete workers_; } - cluster_ = nullptr; + workers_ = nullptr; } -inline const ::flyteidl::plugins::DaskCluster& DaskJob::cluster() const { - const ::flyteidl::plugins::DaskCluster* p = cluster_; - // @@protoc_insertion_point(field_get:flyteidl.plugins.DaskJob.cluster) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::plugins::_DaskCluster_default_instance_); +inline const ::flyteidl::plugins::WorkerGroup& DaskJob::workers() const { + const ::flyteidl::plugins::WorkerGroup* p = workers_; + // @@protoc_insertion_point(field_get:flyteidl.plugins.DaskJob.workers) + return p != nullptr ? *p : *reinterpret_cast( + &::flyteidl::plugins::_WorkerGroup_default_instance_); } -inline ::flyteidl::plugins::DaskCluster* DaskJob::release_cluster() { - // @@protoc_insertion_point(field_release:flyteidl.plugins.DaskJob.cluster) +inline ::flyteidl::plugins::WorkerGroup* DaskJob::release_workers() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.DaskJob.workers) - ::flyteidl::plugins::DaskCluster* temp = cluster_; - cluster_ = nullptr; + ::flyteidl::plugins::WorkerGroup* temp = workers_; + workers_ = nullptr; return temp; } -inline ::flyteidl::plugins::DaskCluster* DaskJob::mutable_cluster() { +inline ::flyteidl::plugins::WorkerGroup* DaskJob::mutable_workers() { - if (cluster_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::plugins::DaskCluster>(GetArenaNoVirtual()); - cluster_ = p; + if (workers_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::plugins::WorkerGroup>(GetArenaNoVirtual()); + workers_ = p; } - // @@protoc_insertion_point(field_mutable:flyteidl.plugins.DaskJob.cluster) - return cluster_; + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.DaskJob.workers) + return workers_; } -inline void DaskJob::set_allocated_cluster(::flyteidl::plugins::DaskCluster* cluster) { +inline void DaskJob::set_allocated_workers(::flyteidl::plugins::WorkerGroup* workers) { ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); if (message_arena == nullptr) { - delete cluster_; + delete workers_; } - if (cluster) { + if (workers) { ::google::protobuf::Arena* submessage_arena = nullptr; if (message_arena != submessage_arena) { - cluster = ::google::protobuf::internal::GetOwnedMessage( - message_arena, cluster, submessage_arena); + workers = ::google::protobuf::internal::GetOwnedMessage( + message_arena, workers, submessage_arena); } } else { } - cluster_ = cluster; - // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.DaskJob.cluster) + workers_ = workers; + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.DaskJob.workers) } // ------------------------------------------------------------------- -// JobPodSpec +// Scheduler // string image = 1; -inline void JobPodSpec::clear_image() { +inline void Scheduler::clear_image() { 
image_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline const ::std::string& JobPodSpec::image() const { - // @@protoc_insertion_point(field_get:flyteidl.plugins.JobPodSpec.image) +inline const ::std::string& Scheduler::image() const { + // @@protoc_insertion_point(field_get:flyteidl.plugins.Scheduler.image) return image_.GetNoArena(); } -inline void JobPodSpec::set_image(const ::std::string& value) { +inline void Scheduler::set_image(const ::std::string& value) { image_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.plugins.JobPodSpec.image) + // @@protoc_insertion_point(field_set:flyteidl.plugins.Scheduler.image) } #if LANG_CXX11 -inline void JobPodSpec::set_image(::std::string&& value) { +inline void Scheduler::set_image(::std::string&& value) { image_.SetNoArena( &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.plugins.JobPodSpec.image) + // @@protoc_insertion_point(field_set_rvalue:flyteidl.plugins.Scheduler.image) } #endif -inline void JobPodSpec::set_image(const char* value) { +inline void Scheduler::set_image(const char* value) { GOOGLE_DCHECK(value != nullptr); image_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.plugins.JobPodSpec.image) + // @@protoc_insertion_point(field_set_char:flyteidl.plugins.Scheduler.image) } -inline void JobPodSpec::set_image(const char* value, size_t size) { +inline void Scheduler::set_image(const char* value, size_t size) { image_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.plugins.JobPodSpec.image) + // @@protoc_insertion_point(field_set_pointer:flyteidl.plugins.Scheduler.image) } -inline ::std::string* JobPodSpec::mutable_image() { +inline ::std::string* Scheduler::mutable_image() { - // @@protoc_insertion_point(field_mutable:flyteidl.plugins.JobPodSpec.image) + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.Scheduler.image) return image_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline ::std::string* JobPodSpec::release_image() { - // @@protoc_insertion_point(field_release:flyteidl.plugins.JobPodSpec.image) +inline ::std::string* Scheduler::release_image() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.Scheduler.image) return image_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline void JobPodSpec::set_allocated_image(::std::string* image) { +inline void Scheduler::set_allocated_image(::std::string* image) { if (image != nullptr) { } else { } image_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), image); - // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.JobPodSpec.image) + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.Scheduler.image) } // .flyteidl.core.Resources resources = 2; -inline bool JobPodSpec::has_resources() const { +inline bool Scheduler::has_resources() const { return this != internal_default_instance() && resources_ != nullptr; } -inline const ::flyteidl::core::Resources& JobPodSpec::resources() const { +inline const ::flyteidl::core::Resources& Scheduler::resources() const { const ::flyteidl::core::Resources* p = 
resources_; - // @@protoc_insertion_point(field_get:flyteidl.plugins.JobPodSpec.resources) + // @@protoc_insertion_point(field_get:flyteidl.plugins.Scheduler.resources) return p != nullptr ? *p : *reinterpret_cast( &::flyteidl::core::_Resources_default_instance_); } -inline ::flyteidl::core::Resources* JobPodSpec::release_resources() { - // @@protoc_insertion_point(field_release:flyteidl.plugins.JobPodSpec.resources) +inline ::flyteidl::core::Resources* Scheduler::release_resources() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.Scheduler.resources) ::flyteidl::core::Resources* temp = resources_; resources_ = nullptr; return temp; } -inline ::flyteidl::core::Resources* JobPodSpec::mutable_resources() { +inline ::flyteidl::core::Resources* Scheduler::mutable_resources() { if (resources_ == nullptr) { auto* p = CreateMaybeMessage<::flyteidl::core::Resources>(GetArenaNoVirtual()); resources_ = p; } - // @@protoc_insertion_point(field_mutable:flyteidl.plugins.JobPodSpec.resources) + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.Scheduler.resources) return resources_; } -inline void JobPodSpec::set_allocated_resources(::flyteidl::core::Resources* resources) { +inline void Scheduler::set_allocated_resources(::flyteidl::core::Resources* resources) { ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); if (message_arena == nullptr) { delete reinterpret_cast< ::google::protobuf::MessageLite*>(resources_); @@ -676,107 +676,107 @@ inline void JobPodSpec::set_allocated_resources(::flyteidl::core::Resources* res } resources_ = resources; - // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.JobPodSpec.resources) + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.Scheduler.resources) } // ------------------------------------------------------------------- -// DaskCluster +// WorkerGroup -// string image = 1; -inline void DaskCluster::clear_image() { +// uint32 number_of_workers = 1; +inline void WorkerGroup::clear_number_of_workers() { + number_of_workers_ = 0u; +} +inline ::google::protobuf::uint32 WorkerGroup::number_of_workers() const { + // @@protoc_insertion_point(field_get:flyteidl.plugins.WorkerGroup.number_of_workers) + return number_of_workers_; +} +inline void WorkerGroup::set_number_of_workers(::google::protobuf::uint32 value) { + + number_of_workers_ = value; + // @@protoc_insertion_point(field_set:flyteidl.plugins.WorkerGroup.number_of_workers) +} + +// string image = 2; +inline void WorkerGroup::clear_image() { image_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline const ::std::string& DaskCluster::image() const { - // @@protoc_insertion_point(field_get:flyteidl.plugins.DaskCluster.image) +inline const ::std::string& WorkerGroup::image() const { + // @@protoc_insertion_point(field_get:flyteidl.plugins.WorkerGroup.image) return image_.GetNoArena(); } -inline void DaskCluster::set_image(const ::std::string& value) { +inline void WorkerGroup::set_image(const ::std::string& value) { image_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.plugins.DaskCluster.image) + // @@protoc_insertion_point(field_set:flyteidl.plugins.WorkerGroup.image) } #if LANG_CXX11 -inline void DaskCluster::set_image(::std::string&& value) { +inline void WorkerGroup::set_image(::std::string&& value) { image_.SetNoArena( &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // 
@@protoc_insertion_point(field_set_rvalue:flyteidl.plugins.DaskCluster.image) + // @@protoc_insertion_point(field_set_rvalue:flyteidl.plugins.WorkerGroup.image) } #endif -inline void DaskCluster::set_image(const char* value) { +inline void WorkerGroup::set_image(const char* value) { GOOGLE_DCHECK(value != nullptr); image_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.plugins.DaskCluster.image) + // @@protoc_insertion_point(field_set_char:flyteidl.plugins.WorkerGroup.image) } -inline void DaskCluster::set_image(const char* value, size_t size) { +inline void WorkerGroup::set_image(const char* value, size_t size) { image_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.plugins.DaskCluster.image) + // @@protoc_insertion_point(field_set_pointer:flyteidl.plugins.WorkerGroup.image) } -inline ::std::string* DaskCluster::mutable_image() { +inline ::std::string* WorkerGroup::mutable_image() { - // @@protoc_insertion_point(field_mutable:flyteidl.plugins.DaskCluster.image) + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.WorkerGroup.image) return image_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline ::std::string* DaskCluster::release_image() { - // @@protoc_insertion_point(field_release:flyteidl.plugins.DaskCluster.image) +inline ::std::string* WorkerGroup::release_image() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.WorkerGroup.image) return image_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline void DaskCluster::set_allocated_image(::std::string* image) { +inline void WorkerGroup::set_allocated_image(::std::string* image) { if (image != nullptr) { } else { } image_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), image); - // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.DaskCluster.image) -} - -// int32 nWorkers = 2; -inline void DaskCluster::clear_nworkers() { - nworkers_ = 0; -} -inline ::google::protobuf::int32 DaskCluster::nworkers() const { - // @@protoc_insertion_point(field_get:flyteidl.plugins.DaskCluster.nWorkers) - return nworkers_; -} -inline void DaskCluster::set_nworkers(::google::protobuf::int32 value) { - - nworkers_ = value; - // @@protoc_insertion_point(field_set:flyteidl.plugins.DaskCluster.nWorkers) + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.WorkerGroup.image) } // .flyteidl.core.Resources resources = 3; -inline bool DaskCluster::has_resources() const { +inline bool WorkerGroup::has_resources() const { return this != internal_default_instance() && resources_ != nullptr; } -inline const ::flyteidl::core::Resources& DaskCluster::resources() const { +inline const ::flyteidl::core::Resources& WorkerGroup::resources() const { const ::flyteidl::core::Resources* p = resources_; - // @@protoc_insertion_point(field_get:flyteidl.plugins.DaskCluster.resources) + // @@protoc_insertion_point(field_get:flyteidl.plugins.WorkerGroup.resources) return p != nullptr ? 
*p : *reinterpret_cast( &::flyteidl::core::_Resources_default_instance_); } -inline ::flyteidl::core::Resources* DaskCluster::release_resources() { - // @@protoc_insertion_point(field_release:flyteidl.plugins.DaskCluster.resources) +inline ::flyteidl::core::Resources* WorkerGroup::release_resources() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.WorkerGroup.resources) ::flyteidl::core::Resources* temp = resources_; resources_ = nullptr; return temp; } -inline ::flyteidl::core::Resources* DaskCluster::mutable_resources() { +inline ::flyteidl::core::Resources* WorkerGroup::mutable_resources() { if (resources_ == nullptr) { auto* p = CreateMaybeMessage<::flyteidl::core::Resources>(GetArenaNoVirtual()); resources_ = p; } - // @@protoc_insertion_point(field_mutable:flyteidl.plugins.DaskCluster.resources) + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.WorkerGroup.resources) return resources_; } -inline void DaskCluster::set_allocated_resources(::flyteidl::core::Resources* resources) { +inline void WorkerGroup::set_allocated_resources(::flyteidl::core::Resources* resources) { ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); if (message_arena == nullptr) { delete reinterpret_cast< ::google::protobuf::MessageLite*>(resources_); @@ -792,7 +792,7 @@ inline void DaskCluster::set_allocated_resources(::flyteidl::core::Resources* re } resources_ = resources; - // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.DaskCluster.resources) + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.WorkerGroup.resources) } #ifdef __GNUC__ diff --git a/gen/pb-go/flyteidl/plugins/dask.pb.go b/gen/pb-go/flyteidl/plugins/dask.pb.go index 12dfdcf13..4b51b0b0c 100644 --- a/gen/pb-go/flyteidl/plugins/dask.pb.go +++ b/gen/pb-go/flyteidl/plugins/dask.pb.go @@ -21,12 +21,12 @@ var _ = math.Inf // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package -// Custom Proto for Dask Plugin +// Custom Proto for Dask Plugin. type DaskJob struct { - // Spec for the job pod - JobPodSpec *JobPodSpec `protobuf:"bytes,1,opt,name=jobPodSpec,proto3" json:"jobPodSpec,omitempty"` - // Cluster - Cluster *DaskCluster `protobuf:"bytes,2,opt,name=cluster,proto3" json:"cluster,omitempty"` + // Spec for the scheduler pod. + Scheduler *Scheduler `protobuf:"bytes,1,opt,name=scheduler,proto3" json:"scheduler,omitempty"` + // Spec of the default worker group. + Workers *WorkerGroup `protobuf:"bytes,2,opt,name=workers,proto3" json:"workers,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -57,77 +57,76 @@ func (m *DaskJob) XXX_DiscardUnknown() { var xxx_messageInfo_DaskJob proto.InternalMessageInfo -func (m *DaskJob) GetJobPodSpec() *JobPodSpec { +func (m *DaskJob) GetScheduler() *Scheduler { if m != nil { - return m.JobPodSpec + return m.Scheduler } return nil } -func (m *DaskJob) GetCluster() *DaskCluster { +func (m *DaskJob) GetWorkers() *WorkerGroup { if m != nil { - return m.Cluster + return m.Workers } return nil } -// Specification for the job pod -type JobPodSpec struct { +// Specification for the scheduler pod. +type Scheduler struct { // Optional image to use. If unset, will use the default image. Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"` - // Resources assigned to the job pod. + // Resources assigned to the scheduler pod. 
Resources *core.Resources `protobuf:"bytes,2,opt,name=resources,proto3" json:"resources,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } -func (m *JobPodSpec) Reset() { *m = JobPodSpec{} } -func (m *JobPodSpec) String() string { return proto.CompactTextString(m) } -func (*JobPodSpec) ProtoMessage() {} -func (*JobPodSpec) Descriptor() ([]byte, []int) { +func (m *Scheduler) Reset() { *m = Scheduler{} } +func (m *Scheduler) String() string { return proto.CompactTextString(m) } +func (*Scheduler) ProtoMessage() {} +func (*Scheduler) Descriptor() ([]byte, []int) { return fileDescriptor_d719e18eb4f4b89f, []int{1} } -func (m *JobPodSpec) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_JobPodSpec.Unmarshal(m, b) +func (m *Scheduler) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Scheduler.Unmarshal(m, b) } -func (m *JobPodSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_JobPodSpec.Marshal(b, m, deterministic) +func (m *Scheduler) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Scheduler.Marshal(b, m, deterministic) } -func (m *JobPodSpec) XXX_Merge(src proto.Message) { - xxx_messageInfo_JobPodSpec.Merge(m, src) +func (m *Scheduler) XXX_Merge(src proto.Message) { + xxx_messageInfo_Scheduler.Merge(m, src) } -func (m *JobPodSpec) XXX_Size() int { - return xxx_messageInfo_JobPodSpec.Size(m) +func (m *Scheduler) XXX_Size() int { + return xxx_messageInfo_Scheduler.Size(m) } -func (m *JobPodSpec) XXX_DiscardUnknown() { - xxx_messageInfo_JobPodSpec.DiscardUnknown(m) +func (m *Scheduler) XXX_DiscardUnknown() { + xxx_messageInfo_Scheduler.DiscardUnknown(m) } -var xxx_messageInfo_JobPodSpec proto.InternalMessageInfo +var xxx_messageInfo_Scheduler proto.InternalMessageInfo -func (m *JobPodSpec) GetImage() string { +func (m *Scheduler) GetImage() string { if m != nil { return m.Image } return "" } -func (m *JobPodSpec) GetResources() *core.Resources { +func (m *Scheduler) GetResources() *core.Resources { if m != nil { return m.Resources } return nil } -type DaskCluster struct { - // Optional image to use for the scheduler as well as the default worker group. If unset, will use - // the default image. - Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"` - // Number of workers in the default worker group - NWorkers int32 `protobuf:"varint,2,opt,name=nWorkers,proto3" json:"nWorkers,omitempty"` - // Resources assigned to the scheduler as well as all pods of the default worker group. +type WorkerGroup struct { + // Number of workers in the group. + NumberOfWorkers uint32 `protobuf:"varint,1,opt,name=number_of_workers,json=numberOfWorkers,proto3" json:"number_of_workers,omitempty"` + // Optional image to use for the pods of the worker group. If unset, will use the default image. + Image string `protobuf:"bytes,2,opt,name=image,proto3" json:"image,omitempty"` + // Resources assigned to the all pods of the worker group. // As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices // it is advised to only set limits. If requests are not explicitly set, the plugin will make // sure to set requests==limits. 
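Illustrative aside, not part of the generated diff: the comment above notes that, per the dask-kubernetes best practices, only resource limits should be set and the plugin will default requests to match the limits. A minimal Go sketch of how the renamed messages could be populated under that convention follows; the import paths and the core.Resources entry types are assumed from the usual flyteidl repository layout rather than taken from this patch.

package main

import (
	"fmt"

	core "github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/core"
	plugins "github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/plugins"
)

// buildDaskJob assembles a DaskJob from the renamed messages: a Scheduler spec
// plus the default WorkerGroup, with only limits set on the resources.
func buildDaskJob() *plugins.DaskJob {
	// Limits only; the plugin is expected to set requests == limits.
	limitsOnly := &core.Resources{
		Limits: []*core.Resources_ResourceEntry{
			{Name: core.Resources_CPU, Value: "1"},
			{Name: core.Resources_MEMORY, Value: "4Gi"},
		},
	}
	return &plugins.DaskJob{
		Scheduler: &plugins.Scheduler{
			Image:     "ghcr.io/dask/dask:2022.12.0", // optional; the default image is used when unset
			Resources: limitsOnly,
		},
		Workers: &plugins.WorkerGroup{
			NumberOfWorkers: 10,
			Resources:       limitsOnly,
		},
	}
}

func main() {
	job := buildDaskJob()
	fmt.Println(job.GetWorkers().GetNumberOfWorkers())
}

Sharing a single limits-only Resources value between the scheduler and the worker group mirrors the requests==limits behaviour described in the comment above.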
@@ -138,46 +137,46 @@ type DaskCluster struct { XXX_sizecache int32 `json:"-"` } -func (m *DaskCluster) Reset() { *m = DaskCluster{} } -func (m *DaskCluster) String() string { return proto.CompactTextString(m) } -func (*DaskCluster) ProtoMessage() {} -func (*DaskCluster) Descriptor() ([]byte, []int) { +func (m *WorkerGroup) Reset() { *m = WorkerGroup{} } +func (m *WorkerGroup) String() string { return proto.CompactTextString(m) } +func (*WorkerGroup) ProtoMessage() {} +func (*WorkerGroup) Descriptor() ([]byte, []int) { return fileDescriptor_d719e18eb4f4b89f, []int{2} } -func (m *DaskCluster) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DaskCluster.Unmarshal(m, b) +func (m *WorkerGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkerGroup.Unmarshal(m, b) } -func (m *DaskCluster) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DaskCluster.Marshal(b, m, deterministic) +func (m *WorkerGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkerGroup.Marshal(b, m, deterministic) } -func (m *DaskCluster) XXX_Merge(src proto.Message) { - xxx_messageInfo_DaskCluster.Merge(m, src) +func (m *WorkerGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkerGroup.Merge(m, src) } -func (m *DaskCluster) XXX_Size() int { - return xxx_messageInfo_DaskCluster.Size(m) +func (m *WorkerGroup) XXX_Size() int { + return xxx_messageInfo_WorkerGroup.Size(m) } -func (m *DaskCluster) XXX_DiscardUnknown() { - xxx_messageInfo_DaskCluster.DiscardUnknown(m) +func (m *WorkerGroup) XXX_DiscardUnknown() { + xxx_messageInfo_WorkerGroup.DiscardUnknown(m) } -var xxx_messageInfo_DaskCluster proto.InternalMessageInfo +var xxx_messageInfo_WorkerGroup proto.InternalMessageInfo -func (m *DaskCluster) GetImage() string { +func (m *WorkerGroup) GetNumberOfWorkers() uint32 { if m != nil { - return m.Image + return m.NumberOfWorkers } - return "" + return 0 } -func (m *DaskCluster) GetNWorkers() int32 { +func (m *WorkerGroup) GetImage() string { if m != nil { - return m.NWorkers + return m.Image } - return 0 + return "" } -func (m *DaskCluster) GetResources() *core.Resources { +func (m *WorkerGroup) GetResources() *core.Resources { if m != nil { return m.Resources } @@ -186,29 +185,30 @@ func (m *DaskCluster) GetResources() *core.Resources { func init() { proto.RegisterType((*DaskJob)(nil), "flyteidl.plugins.DaskJob") - proto.RegisterType((*JobPodSpec)(nil), "flyteidl.plugins.JobPodSpec") - proto.RegisterType((*DaskCluster)(nil), "flyteidl.plugins.DaskCluster") + proto.RegisterType((*Scheduler)(nil), "flyteidl.plugins.Scheduler") + proto.RegisterType((*WorkerGroup)(nil), "flyteidl.plugins.WorkerGroup") } func init() { proto.RegisterFile("flyteidl/plugins/dask.proto", fileDescriptor_d719e18eb4f4b89f) } var fileDescriptor_d719e18eb4f4b89f = []byte{ - // 263 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xc1, 0x4b, 0xc3, 0x30, - 0x14, 0xc6, 0xa9, 0x32, 0xe7, 0xde, 0x2e, 0x12, 0x3c, 0xd4, 0xa9, 0x20, 0x3d, 0x79, 0x31, 0x01, - 0x05, 0x87, 0xe0, 0x49, 0x3d, 0xed, 0x24, 0xf1, 0x20, 0xec, 0xd6, 0xa4, 0xcf, 0x58, 0xdb, 0xf5, - 0x95, 0x24, 0x45, 0xbc, 0xf9, 0xa7, 0xcb, 0xda, 0xb5, 0x9d, 0x15, 0x61, 0xc7, 0xf0, 0x7e, 0xdf, - 0xef, 0x23, 0x7c, 0x70, 0xfa, 0x96, 0x7f, 0x79, 0x4c, 0x93, 0x5c, 0x94, 0x79, 0x65, 0xd2, 0xc2, - 0x89, 0x24, 0x76, 0x19, 0x2f, 0x2d, 0x79, 0x62, 0x47, 0xed, 0x91, 0x6f, 0x8e, 0xb3, 0x93, 0x0e, - 0xd7, 0x64, 0x51, 0xf8, 0xd8, 0x65, 0xae, 0x81, 0xa3, 
0xef, 0x00, 0xc6, 0x4f, 0xb1, 0xcb, 0x16, - 0xa4, 0xd8, 0x3d, 0xc0, 0x07, 0xa9, 0x67, 0x4a, 0x5e, 0x4a, 0xd4, 0x61, 0x70, 0x11, 0x5c, 0x4e, - 0xaf, 0xcf, 0xf8, 0xd0, 0xc6, 0x17, 0x1d, 0x23, 0xb7, 0x78, 0x36, 0x87, 0xb1, 0xce, 0x2b, 0xe7, - 0xd1, 0x86, 0x7b, 0x75, 0xf4, 0xfc, 0x6f, 0x74, 0xdd, 0xf4, 0xd8, 0x40, 0xb2, 0xa5, 0xa3, 0x25, - 0x40, 0xaf, 0x64, 0xc7, 0x30, 0x4a, 0x57, 0xb1, 0xc1, 0xba, 0x7f, 0x22, 0x9b, 0x07, 0xbb, 0x85, - 0x89, 0x45, 0x47, 0x95, 0xd5, 0xe8, 0x36, 0xfa, 0xb0, 0xd7, 0xaf, 0x7f, 0xc5, 0x65, 0x7b, 0x97, - 0x3d, 0x1a, 0x7d, 0xc2, 0x74, 0xab, 0xf3, 0x1f, 0xf9, 0x0c, 0x0e, 0x8b, 0x57, 0xb2, 0x19, 0xda, - 0xc6, 0x3d, 0x92, 0xdd, 0xfb, 0x77, 0xf1, 0xfe, 0xce, 0xc5, 0x0f, 0x77, 0xcb, 0xb9, 0x49, 0xfd, - 0x7b, 0xa5, 0xb8, 0xa6, 0x95, 0xa8, 0x03, 0x64, 0x8d, 0xe8, 0x86, 0x30, 0x58, 0x88, 0x52, 0x5d, - 0x19, 0x12, 0xc3, 0x29, 0xd5, 0x41, 0xbd, 0xcc, 0xcd, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xba, - 0x86, 0xdc, 0x43, 0xe5, 0x01, 0x00, 0x00, + // 277 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xcb, 0x4b, 0xf4, 0x30, + 0x14, 0xc5, 0xe9, 0x7c, 0x7c, 0x0e, 0xcd, 0x20, 0x6a, 0x70, 0x51, 0x1d, 0x04, 0xe9, 0x4a, 0x04, + 0x13, 0x50, 0x70, 0x98, 0xad, 0x08, 0x82, 0x1b, 0x21, 0x2e, 0x44, 0x37, 0x43, 0x1f, 0xb7, 0x99, + 0xd2, 0xc7, 0x2d, 0x37, 0x0d, 0xe2, 0xc2, 0xb5, 0xff, 0xb6, 0x4c, 0x6b, 0x5a, 0x19, 0x11, 0x5c, + 0x86, 0xf3, 0xbb, 0xe7, 0x9c, 0x70, 0xd8, 0x3c, 0x2b, 0xdf, 0x5a, 0xc8, 0xd3, 0x52, 0x36, 0xa5, + 0xd5, 0x79, 0x6d, 0x64, 0x1a, 0x99, 0x42, 0x34, 0x84, 0x2d, 0xf2, 0x7d, 0x27, 0x8a, 0x2f, 0xf1, + 0xf8, 0x68, 0xc0, 0x13, 0x24, 0x90, 0x6d, 0x64, 0x0a, 0xd3, 0xc3, 0xe1, 0x3b, 0x9b, 0xde, 0x46, + 0xa6, 0xb8, 0xc7, 0x98, 0x2f, 0x99, 0x6f, 0x92, 0x35, 0xa4, 0xb6, 0x04, 0x0a, 0xbc, 0x53, 0xef, + 0x6c, 0x76, 0x39, 0x17, 0xdb, 0x5e, 0xe2, 0xd1, 0x21, 0x6a, 0xa4, 0xf9, 0x82, 0x4d, 0x5f, 0x91, + 0x0a, 0x20, 0x13, 0x4c, 0xba, 0xc3, 0x93, 0x9f, 0x87, 0x4f, 0x1d, 0x70, 0x47, 0x68, 0x1b, 0xe5, + 0xe8, 0xf0, 0x99, 0xf9, 0x83, 0x21, 0x3f, 0x64, 0xff, 0xf3, 0x2a, 0xd2, 0xd0, 0x85, 0xfb, 0xaa, + 0x7f, 0xf0, 0x6b, 0xe6, 0x13, 0x18, 0xb4, 0x94, 0x80, 0x73, 0x0f, 0x46, 0xf7, 0xcd, 0x87, 0x84, + 0x72, 0xba, 0x1a, 0xd1, 0xf0, 0xc3, 0x63, 0xb3, 0x6f, 0x99, 0xfc, 0x9c, 0x1d, 0xd4, 0xb6, 0x8a, + 0x81, 0x56, 0x98, 0xad, 0x5c, 0xdb, 0x4d, 0xd2, 0xae, 0xda, 0xeb, 0x85, 0x87, 0xac, 0xe7, 0xcd, + 0xd8, 0x64, 0xf2, 0x6b, 0x93, 0x7f, 0x7f, 0x6e, 0x72, 0xb3, 0x7c, 0x59, 0xe8, 0xbc, 0x5d, 0xdb, + 0x58, 0x24, 0x58, 0xc9, 0xee, 0x00, 0x49, 0xcb, 0x61, 0x14, 0x0d, 0xb5, 0x6c, 0xe2, 0x0b, 0x8d, + 0x72, 0x7b, 0xd6, 0x78, 0xa7, 0x5b, 0xe9, 0xea, 0x33, 0x00, 0x00, 0xff, 0xff, 0x1e, 0xa6, 0x9a, + 0xc6, 0xf1, 0x01, 0x00, 0x00, } diff --git a/gen/pb-go/flyteidl/plugins/dask.pb.validate.go b/gen/pb-go/flyteidl/plugins/dask.pb.validate.go index de97a46bd..972caa5b4 100644 --- a/gen/pb-go/flyteidl/plugins/dask.pb.validate.go +++ b/gen/pb-go/flyteidl/plugins/dask.pb.validate.go @@ -43,20 +43,20 @@ func (m *DaskJob) Validate() error { return nil } - if v, ok := interface{}(m.GetJobPodSpec()).(interface{ Validate() error }); ok { + if v, ok := interface{}(m.GetScheduler()).(interface{ Validate() error }); ok { if err := v.Validate(); err != nil { return DaskJobValidationError{ - field: "JobPodSpec", + field: "Scheduler", reason: "embedded message failed validation", cause: err, } } } - if v, ok := interface{}(m.GetCluster()).(interface{ Validate() error }); ok { + if v, ok := interface{}(m.GetWorkers()).(interface{ Validate() error }); ok { if err := 
v.Validate(); err != nil { return DaskJobValidationError{ - field: "Cluster", + field: "Workers", reason: "embedded message failed validation", cause: err, } @@ -120,9 +120,9 @@ var _ interface { ErrorName() string } = DaskJobValidationError{} -// Validate checks the field values on JobPodSpec with the rules defined in the +// Validate checks the field values on Scheduler with the rules defined in the // proto definition for this message. If any rules are violated, an error is returned. -func (m *JobPodSpec) Validate() error { +func (m *Scheduler) Validate() error { if m == nil { return nil } @@ -131,7 +131,7 @@ func (m *JobPodSpec) Validate() error { if v, ok := interface{}(m.GetResources()).(interface{ Validate() error }); ok { if err := v.Validate(); err != nil { - return JobPodSpecValidationError{ + return SchedulerValidationError{ field: "Resources", reason: "embedded message failed validation", cause: err, @@ -142,9 +142,9 @@ func (m *JobPodSpec) Validate() error { return nil } -// JobPodSpecValidationError is the validation error returned by -// JobPodSpec.Validate if the designated constraints aren't met. -type JobPodSpecValidationError struct { +// SchedulerValidationError is the validation error returned by +// Scheduler.Validate if the designated constraints aren't met. +type SchedulerValidationError struct { field string reason string cause error @@ -152,22 +152,22 @@ type JobPodSpecValidationError struct { } // Field function returns field value. -func (e JobPodSpecValidationError) Field() string { return e.field } +func (e SchedulerValidationError) Field() string { return e.field } // Reason function returns reason value. -func (e JobPodSpecValidationError) Reason() string { return e.reason } +func (e SchedulerValidationError) Reason() string { return e.reason } // Cause function returns cause value. -func (e JobPodSpecValidationError) Cause() error { return e.cause } +func (e SchedulerValidationError) Cause() error { return e.cause } // Key function returns key value. -func (e JobPodSpecValidationError) Key() bool { return e.key } +func (e SchedulerValidationError) Key() bool { return e.key } // ErrorName returns error name. -func (e JobPodSpecValidationError) ErrorName() string { return "JobPodSpecValidationError" } +func (e SchedulerValidationError) ErrorName() string { return "SchedulerValidationError" } // Error satisfies the builtin error interface -func (e JobPodSpecValidationError) Error() string { +func (e SchedulerValidationError) Error() string { cause := "" if e.cause != nil { cause = fmt.Sprintf(" | caused by: %v", e.cause) @@ -179,14 +179,14 @@ func (e JobPodSpecValidationError) Error() string { } return fmt.Sprintf( - "invalid %sJobPodSpec.%s: %s%s", + "invalid %sScheduler.%s: %s%s", key, e.field, e.reason, cause) } -var _ error = JobPodSpecValidationError{} +var _ error = SchedulerValidationError{} var _ interface { Field() string @@ -194,23 +194,23 @@ var _ interface { Key() bool Cause() error ErrorName() string -} = JobPodSpecValidationError{} +} = SchedulerValidationError{} -// Validate checks the field values on DaskCluster with the rules defined in +// Validate checks the field values on WorkerGroup with the rules defined in // the proto definition for this message. If any rules are violated, an error // is returned. 
-func (m *DaskCluster) Validate() error { +func (m *WorkerGroup) Validate() error { if m == nil { return nil } - // no validation rules for Image + // no validation rules for NumberOfWorkers - // no validation rules for NWorkers + // no validation rules for Image if v, ok := interface{}(m.GetResources()).(interface{ Validate() error }); ok { if err := v.Validate(); err != nil { - return DaskClusterValidationError{ + return WorkerGroupValidationError{ field: "Resources", reason: "embedded message failed validation", cause: err, @@ -221,9 +221,9 @@ func (m *DaskCluster) Validate() error { return nil } -// DaskClusterValidationError is the validation error returned by -// DaskCluster.Validate if the designated constraints aren't met. -type DaskClusterValidationError struct { +// WorkerGroupValidationError is the validation error returned by +// WorkerGroup.Validate if the designated constraints aren't met. +type WorkerGroupValidationError struct { field string reason string cause error @@ -231,22 +231,22 @@ type DaskClusterValidationError struct { } // Field function returns field value. -func (e DaskClusterValidationError) Field() string { return e.field } +func (e WorkerGroupValidationError) Field() string { return e.field } // Reason function returns reason value. -func (e DaskClusterValidationError) Reason() string { return e.reason } +func (e WorkerGroupValidationError) Reason() string { return e.reason } // Cause function returns cause value. -func (e DaskClusterValidationError) Cause() error { return e.cause } +func (e WorkerGroupValidationError) Cause() error { return e.cause } // Key function returns key value. -func (e DaskClusterValidationError) Key() bool { return e.key } +func (e WorkerGroupValidationError) Key() bool { return e.key } // ErrorName returns error name. -func (e DaskClusterValidationError) ErrorName() string { return "DaskClusterValidationError" } +func (e WorkerGroupValidationError) ErrorName() string { return "WorkerGroupValidationError" } // Error satisfies the builtin error interface -func (e DaskClusterValidationError) Error() string { +func (e WorkerGroupValidationError) Error() string { cause := "" if e.cause != nil { cause = fmt.Sprintf(" | caused by: %v", e.cause) @@ -258,14 +258,14 @@ func (e DaskClusterValidationError) Error() string { } return fmt.Sprintf( - "invalid %sDaskCluster.%s: %s%s", + "invalid %sWorkerGroup.%s: %s%s", key, e.field, e.reason, cause) } -var _ error = DaskClusterValidationError{} +var _ error = WorkerGroupValidationError{} var _ interface { Field() string @@ -273,4 +273,4 @@ var _ interface { Key() bool Cause() error ErrorName() string -} = DaskClusterValidationError{} +} = WorkerGroupValidationError{} diff --git a/gen/pb-java/flyteidl/plugins/Dask.java b/gen/pb-java/flyteidl/plugins/Dask.java index edb0d61d1..b7d4cbca1 100644 --- a/gen/pb-java/flyteidl/plugins/Dask.java +++ b/gen/pb-java/flyteidl/plugins/Dask.java @@ -20,57 +20,57 @@ public interface DaskJobOrBuilder extends /** *
-     * Spec for the job pod
+     * Spec for the scheduler pod.
      *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - boolean hasJobPodSpec(); + boolean hasScheduler(); /** *
-     * Spec for the job pod
+     * Spec for the scheduler pod.
      *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - flyteidl.plugins.Dask.JobPodSpec getJobPodSpec(); + flyteidl.plugins.Dask.Scheduler getScheduler(); /** *
-     * Spec for the job pod
+     * Spec for the scheduler pod.
      *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - flyteidl.plugins.Dask.JobPodSpecOrBuilder getJobPodSpecOrBuilder(); + flyteidl.plugins.Dask.SchedulerOrBuilder getSchedulerOrBuilder(); /** *
-     * Cluster
+     * Spec of the default worker group.
      *
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - boolean hasCluster(); + boolean hasWorkers(); /** *
-     * Cluster
+     * Spec of the default worker group.
      *
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - flyteidl.plugins.Dask.DaskCluster getCluster(); + flyteidl.plugins.Dask.WorkerGroup getWorkers(); /** *
-     * Cluster
+     * Spec of the default worker group.
      *
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - flyteidl.plugins.Dask.DaskClusterOrBuilder getClusterOrBuilder(); + flyteidl.plugins.Dask.WorkerGroupOrBuilder getWorkersOrBuilder(); } /** *
-   * Custom Proto for Dask Plugin
+   * Custom Proto for Dask Plugin.
     *
* * Protobuf type {@code flyteidl.plugins.DaskJob} @@ -112,27 +112,27 @@ private DaskJob( done = true; break; case 10: { - flyteidl.plugins.Dask.JobPodSpec.Builder subBuilder = null; - if (jobPodSpec_ != null) { - subBuilder = jobPodSpec_.toBuilder(); + flyteidl.plugins.Dask.Scheduler.Builder subBuilder = null; + if (scheduler_ != null) { + subBuilder = scheduler_.toBuilder(); } - jobPodSpec_ = input.readMessage(flyteidl.plugins.Dask.JobPodSpec.parser(), extensionRegistry); + scheduler_ = input.readMessage(flyteidl.plugins.Dask.Scheduler.parser(), extensionRegistry); if (subBuilder != null) { - subBuilder.mergeFrom(jobPodSpec_); - jobPodSpec_ = subBuilder.buildPartial(); + subBuilder.mergeFrom(scheduler_); + scheduler_ = subBuilder.buildPartial(); } break; } case 18: { - flyteidl.plugins.Dask.DaskCluster.Builder subBuilder = null; - if (cluster_ != null) { - subBuilder = cluster_.toBuilder(); + flyteidl.plugins.Dask.WorkerGroup.Builder subBuilder = null; + if (workers_ != null) { + subBuilder = workers_.toBuilder(); } - cluster_ = input.readMessage(flyteidl.plugins.Dask.DaskCluster.parser(), extensionRegistry); + workers_ = input.readMessage(flyteidl.plugins.Dask.WorkerGroup.parser(), extensionRegistry); if (subBuilder != null) { - subBuilder.mergeFrom(cluster_); - cluster_ = subBuilder.buildPartial(); + subBuilder.mergeFrom(workers_); + workers_ = subBuilder.buildPartial(); } break; @@ -169,70 +169,70 @@ private DaskJob( flyteidl.plugins.Dask.DaskJob.class, flyteidl.plugins.Dask.DaskJob.Builder.class); } - public static final int JOBPODSPEC_FIELD_NUMBER = 1; - private flyteidl.plugins.Dask.JobPodSpec jobPodSpec_; + public static final int SCHEDULER_FIELD_NUMBER = 1; + private flyteidl.plugins.Dask.Scheduler scheduler_; /** *
-     * Spec for the job pod
+     * Spec for the scheduler pod.
      *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - public boolean hasJobPodSpec() { - return jobPodSpec_ != null; + public boolean hasScheduler() { + return scheduler_ != null; } /** *
-     * Spec for the job pod
+     * Spec for the scheduler pod.
      *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - public flyteidl.plugins.Dask.JobPodSpec getJobPodSpec() { - return jobPodSpec_ == null ? flyteidl.plugins.Dask.JobPodSpec.getDefaultInstance() : jobPodSpec_; + public flyteidl.plugins.Dask.Scheduler getScheduler() { + return scheduler_ == null ? flyteidl.plugins.Dask.Scheduler.getDefaultInstance() : scheduler_; } /** *
-     * Spec for the job pod
+     * Spec for the scheduler pod.
      *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - public flyteidl.plugins.Dask.JobPodSpecOrBuilder getJobPodSpecOrBuilder() { - return getJobPodSpec(); + public flyteidl.plugins.Dask.SchedulerOrBuilder getSchedulerOrBuilder() { + return getScheduler(); } - public static final int CLUSTER_FIELD_NUMBER = 2; - private flyteidl.plugins.Dask.DaskCluster cluster_; + public static final int WORKERS_FIELD_NUMBER = 2; + private flyteidl.plugins.Dask.WorkerGroup workers_; /** *
-     * Cluster
+     * Spec of the default worker group.
      *
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - public boolean hasCluster() { - return cluster_ != null; + public boolean hasWorkers() { + return workers_ != null; } /** *
-     * Cluster
+     * Spec of the default worker group.
      *
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - public flyteidl.plugins.Dask.DaskCluster getCluster() { - return cluster_ == null ? flyteidl.plugins.Dask.DaskCluster.getDefaultInstance() : cluster_; + public flyteidl.plugins.Dask.WorkerGroup getWorkers() { + return workers_ == null ? flyteidl.plugins.Dask.WorkerGroup.getDefaultInstance() : workers_; } /** *
-     * Cluster
+     * Spec of the default worker group.
      *
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - public flyteidl.plugins.Dask.DaskClusterOrBuilder getClusterOrBuilder() { - return getCluster(); + public flyteidl.plugins.Dask.WorkerGroupOrBuilder getWorkersOrBuilder() { + return getWorkers(); } private byte memoizedIsInitialized = -1; @@ -249,11 +249,11 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (jobPodSpec_ != null) { - output.writeMessage(1, getJobPodSpec()); + if (scheduler_ != null) { + output.writeMessage(1, getScheduler()); } - if (cluster_ != null) { - output.writeMessage(2, getCluster()); + if (workers_ != null) { + output.writeMessage(2, getWorkers()); } unknownFields.writeTo(output); } @@ -264,13 +264,13 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (jobPodSpec_ != null) { + if (scheduler_ != null) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getJobPodSpec()); + .computeMessageSize(1, getScheduler()); } - if (cluster_ != null) { + if (workers_ != null) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getCluster()); + .computeMessageSize(2, getWorkers()); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -287,15 +287,15 @@ public boolean equals(final java.lang.Object obj) { } flyteidl.plugins.Dask.DaskJob other = (flyteidl.plugins.Dask.DaskJob) obj; - if (hasJobPodSpec() != other.hasJobPodSpec()) return false; - if (hasJobPodSpec()) { - if (!getJobPodSpec() - .equals(other.getJobPodSpec())) return false; + if (hasScheduler() != other.hasScheduler()) return false; + if (hasScheduler()) { + if (!getScheduler() + .equals(other.getScheduler())) return false; } - if (hasCluster() != other.hasCluster()) return false; - if (hasCluster()) { - if (!getCluster() - .equals(other.getCluster())) return false; + if (hasWorkers() != other.hasWorkers()) return false; + if (hasWorkers()) { + if (!getWorkers() + .equals(other.getWorkers())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; @@ -308,13 +308,13 @@ public int hashCode() { } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); - if (hasJobPodSpec()) { - hash = (37 * hash) + JOBPODSPEC_FIELD_NUMBER; - hash = (53 * hash) + getJobPodSpec().hashCode(); + if (hasScheduler()) { + hash = (37 * hash) + SCHEDULER_FIELD_NUMBER; + hash = (53 * hash) + getScheduler().hashCode(); } - if (hasCluster()) { - hash = (37 * hash) + CLUSTER_FIELD_NUMBER; - hash = (53 * hash) + getCluster().hashCode(); + if (hasWorkers()) { + hash = (37 * hash) + WORKERS_FIELD_NUMBER; + hash = (53 * hash) + getWorkers().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; @@ -413,7 +413,7 @@ protected Builder newBuilderForType( } /** *
-     * Custom Proto for Dask Plugin
+     * Custom Proto for Dask Plugin.
      *
* * Protobuf type {@code flyteidl.plugins.DaskJob} @@ -453,17 +453,17 @@ private void maybeForceBuilderInitialization() { @java.lang.Override public Builder clear() { super.clear(); - if (jobPodSpecBuilder_ == null) { - jobPodSpec_ = null; + if (schedulerBuilder_ == null) { + scheduler_ = null; } else { - jobPodSpec_ = null; - jobPodSpecBuilder_ = null; + scheduler_ = null; + schedulerBuilder_ = null; } - if (clusterBuilder_ == null) { - cluster_ = null; + if (workersBuilder_ == null) { + workers_ = null; } else { - cluster_ = null; - clusterBuilder_ = null; + workers_ = null; + workersBuilder_ = null; } return this; } @@ -491,15 +491,15 @@ public flyteidl.plugins.Dask.DaskJob build() { @java.lang.Override public flyteidl.plugins.Dask.DaskJob buildPartial() { flyteidl.plugins.Dask.DaskJob result = new flyteidl.plugins.Dask.DaskJob(this); - if (jobPodSpecBuilder_ == null) { - result.jobPodSpec_ = jobPodSpec_; + if (schedulerBuilder_ == null) { + result.scheduler_ = scheduler_; } else { - result.jobPodSpec_ = jobPodSpecBuilder_.build(); + result.scheduler_ = schedulerBuilder_.build(); } - if (clusterBuilder_ == null) { - result.cluster_ = cluster_; + if (workersBuilder_ == null) { + result.workers_ = workers_; } else { - result.cluster_ = clusterBuilder_.build(); + result.workers_ = workersBuilder_.build(); } onBuilt(); return result; @@ -549,11 +549,11 @@ public Builder mergeFrom(com.google.protobuf.Message other) { public Builder mergeFrom(flyteidl.plugins.Dask.DaskJob other) { if (other == flyteidl.plugins.Dask.DaskJob.getDefaultInstance()) return this; - if (other.hasJobPodSpec()) { - mergeJobPodSpec(other.getJobPodSpec()); + if (other.hasScheduler()) { + mergeScheduler(other.getScheduler()); } - if (other.hasCluster()) { - mergeCluster(other.getCluster()); + if (other.hasWorkers()) { + mergeWorkers(other.getWorkers()); } this.mergeUnknownFields(other.unknownFields); onChanged(); @@ -584,310 +584,310 @@ public Builder mergeFrom( return this; } - private flyteidl.plugins.Dask.JobPodSpec jobPodSpec_; + private flyteidl.plugins.Dask.Scheduler scheduler_; private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.plugins.Dask.JobPodSpec, flyteidl.plugins.Dask.JobPodSpec.Builder, flyteidl.plugins.Dask.JobPodSpecOrBuilder> jobPodSpecBuilder_; + flyteidl.plugins.Dask.Scheduler, flyteidl.plugins.Dask.Scheduler.Builder, flyteidl.plugins.Dask.SchedulerOrBuilder> schedulerBuilder_; /** *
-       * Spec for the job pod
+       * Spec for the scheduler pod.
        *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - public boolean hasJobPodSpec() { - return jobPodSpecBuilder_ != null || jobPodSpec_ != null; + public boolean hasScheduler() { + return schedulerBuilder_ != null || scheduler_ != null; } /** *
-       * Spec for the job pod
+       * Spec for the scheduler pod.
        *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - public flyteidl.plugins.Dask.JobPodSpec getJobPodSpec() { - if (jobPodSpecBuilder_ == null) { - return jobPodSpec_ == null ? flyteidl.plugins.Dask.JobPodSpec.getDefaultInstance() : jobPodSpec_; + public flyteidl.plugins.Dask.Scheduler getScheduler() { + if (schedulerBuilder_ == null) { + return scheduler_ == null ? flyteidl.plugins.Dask.Scheduler.getDefaultInstance() : scheduler_; } else { - return jobPodSpecBuilder_.getMessage(); + return schedulerBuilder_.getMessage(); } } /** *
-       * Spec for the job pod
+       * Spec for the scheduler pod.
        *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - public Builder setJobPodSpec(flyteidl.plugins.Dask.JobPodSpec value) { - if (jobPodSpecBuilder_ == null) { + public Builder setScheduler(flyteidl.plugins.Dask.Scheduler value) { + if (schedulerBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - jobPodSpec_ = value; + scheduler_ = value; onChanged(); } else { - jobPodSpecBuilder_.setMessage(value); + schedulerBuilder_.setMessage(value); } return this; } /** *
-       * Spec for the job pod
+       * Spec for the scheduler pod.
        *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - public Builder setJobPodSpec( - flyteidl.plugins.Dask.JobPodSpec.Builder builderForValue) { - if (jobPodSpecBuilder_ == null) { - jobPodSpec_ = builderForValue.build(); + public Builder setScheduler( + flyteidl.plugins.Dask.Scheduler.Builder builderForValue) { + if (schedulerBuilder_ == null) { + scheduler_ = builderForValue.build(); onChanged(); } else { - jobPodSpecBuilder_.setMessage(builderForValue.build()); + schedulerBuilder_.setMessage(builderForValue.build()); } return this; } /** *
-       * Spec for the job pod
+       * Spec for the scheduler pod.
        *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - public Builder mergeJobPodSpec(flyteidl.plugins.Dask.JobPodSpec value) { - if (jobPodSpecBuilder_ == null) { - if (jobPodSpec_ != null) { - jobPodSpec_ = - flyteidl.plugins.Dask.JobPodSpec.newBuilder(jobPodSpec_).mergeFrom(value).buildPartial(); + public Builder mergeScheduler(flyteidl.plugins.Dask.Scheduler value) { + if (schedulerBuilder_ == null) { + if (scheduler_ != null) { + scheduler_ = + flyteidl.plugins.Dask.Scheduler.newBuilder(scheduler_).mergeFrom(value).buildPartial(); } else { - jobPodSpec_ = value; + scheduler_ = value; } onChanged(); } else { - jobPodSpecBuilder_.mergeFrom(value); + schedulerBuilder_.mergeFrom(value); } return this; } /** *
-       * Spec for the job pod
+       * Spec for the scheduler pod.
        *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - public Builder clearJobPodSpec() { - if (jobPodSpecBuilder_ == null) { - jobPodSpec_ = null; + public Builder clearScheduler() { + if (schedulerBuilder_ == null) { + scheduler_ = null; onChanged(); } else { - jobPodSpec_ = null; - jobPodSpecBuilder_ = null; + scheduler_ = null; + schedulerBuilder_ = null; } return this; } /** *
-       * Spec for the job pod
+       * Spec for the scheduler pod.
        *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - public flyteidl.plugins.Dask.JobPodSpec.Builder getJobPodSpecBuilder() { + public flyteidl.plugins.Dask.Scheduler.Builder getSchedulerBuilder() { onChanged(); - return getJobPodSpecFieldBuilder().getBuilder(); + return getSchedulerFieldBuilder().getBuilder(); } /** *
-       * Spec for the job pod
+       * Spec for the scheduler pod.
        *
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ - public flyteidl.plugins.Dask.JobPodSpecOrBuilder getJobPodSpecOrBuilder() { - if (jobPodSpecBuilder_ != null) { - return jobPodSpecBuilder_.getMessageOrBuilder(); + public flyteidl.plugins.Dask.SchedulerOrBuilder getSchedulerOrBuilder() { + if (schedulerBuilder_ != null) { + return schedulerBuilder_.getMessageOrBuilder(); } else { - return jobPodSpec_ == null ? - flyteidl.plugins.Dask.JobPodSpec.getDefaultInstance() : jobPodSpec_; + return scheduler_ == null ? + flyteidl.plugins.Dask.Scheduler.getDefaultInstance() : scheduler_; } } /** *
-       * Spec for the job pod
+       * Spec for the scheduler pod.
        * 
* - * .flyteidl.plugins.JobPodSpec jobPodSpec = 1; + * .flyteidl.plugins.Scheduler scheduler = 1; */ private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.plugins.Dask.JobPodSpec, flyteidl.plugins.Dask.JobPodSpec.Builder, flyteidl.plugins.Dask.JobPodSpecOrBuilder> - getJobPodSpecFieldBuilder() { - if (jobPodSpecBuilder_ == null) { - jobPodSpecBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.plugins.Dask.JobPodSpec, flyteidl.plugins.Dask.JobPodSpec.Builder, flyteidl.plugins.Dask.JobPodSpecOrBuilder>( - getJobPodSpec(), + flyteidl.plugins.Dask.Scheduler, flyteidl.plugins.Dask.Scheduler.Builder, flyteidl.plugins.Dask.SchedulerOrBuilder> + getSchedulerFieldBuilder() { + if (schedulerBuilder_ == null) { + schedulerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.plugins.Dask.Scheduler, flyteidl.plugins.Dask.Scheduler.Builder, flyteidl.plugins.Dask.SchedulerOrBuilder>( + getScheduler(), getParentForChildren(), isClean()); - jobPodSpec_ = null; + scheduler_ = null; } - return jobPodSpecBuilder_; + return schedulerBuilder_; } - private flyteidl.plugins.Dask.DaskCluster cluster_; + private flyteidl.plugins.Dask.WorkerGroup workers_; private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.plugins.Dask.DaskCluster, flyteidl.plugins.Dask.DaskCluster.Builder, flyteidl.plugins.Dask.DaskClusterOrBuilder> clusterBuilder_; + flyteidl.plugins.Dask.WorkerGroup, flyteidl.plugins.Dask.WorkerGroup.Builder, flyteidl.plugins.Dask.WorkerGroupOrBuilder> workersBuilder_; /** *
-       * Cluster
+       * Spec of the default worker group.
        * 
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - public boolean hasCluster() { - return clusterBuilder_ != null || cluster_ != null; + public boolean hasWorkers() { + return workersBuilder_ != null || workers_ != null; } /** *
-       * Cluster
+       * Spec of the default worker group.
        * 
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - public flyteidl.plugins.Dask.DaskCluster getCluster() { - if (clusterBuilder_ == null) { - return cluster_ == null ? flyteidl.plugins.Dask.DaskCluster.getDefaultInstance() : cluster_; + public flyteidl.plugins.Dask.WorkerGroup getWorkers() { + if (workersBuilder_ == null) { + return workers_ == null ? flyteidl.plugins.Dask.WorkerGroup.getDefaultInstance() : workers_; } else { - return clusterBuilder_.getMessage(); + return workersBuilder_.getMessage(); } } /** *
-       * Cluster
+       * Spec of the default worker group.
        * 
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - public Builder setCluster(flyteidl.plugins.Dask.DaskCluster value) { - if (clusterBuilder_ == null) { + public Builder setWorkers(flyteidl.plugins.Dask.WorkerGroup value) { + if (workersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - cluster_ = value; + workers_ = value; onChanged(); } else { - clusterBuilder_.setMessage(value); + workersBuilder_.setMessage(value); } return this; } /** *
-       * Cluster
+       * Spec of the default worker group.
        * 
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - public Builder setCluster( - flyteidl.plugins.Dask.DaskCluster.Builder builderForValue) { - if (clusterBuilder_ == null) { - cluster_ = builderForValue.build(); + public Builder setWorkers( + flyteidl.plugins.Dask.WorkerGroup.Builder builderForValue) { + if (workersBuilder_ == null) { + workers_ = builderForValue.build(); onChanged(); } else { - clusterBuilder_.setMessage(builderForValue.build()); + workersBuilder_.setMessage(builderForValue.build()); } return this; } /** *
-       * Cluster
+       * Spec of the default worker group.
        * 
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - public Builder mergeCluster(flyteidl.plugins.Dask.DaskCluster value) { - if (clusterBuilder_ == null) { - if (cluster_ != null) { - cluster_ = - flyteidl.plugins.Dask.DaskCluster.newBuilder(cluster_).mergeFrom(value).buildPartial(); + public Builder mergeWorkers(flyteidl.plugins.Dask.WorkerGroup value) { + if (workersBuilder_ == null) { + if (workers_ != null) { + workers_ = + flyteidl.plugins.Dask.WorkerGroup.newBuilder(workers_).mergeFrom(value).buildPartial(); } else { - cluster_ = value; + workers_ = value; } onChanged(); } else { - clusterBuilder_.mergeFrom(value); + workersBuilder_.mergeFrom(value); } return this; } /** *
-       * Cluster
+       * Spec of the default worker group.
        * 
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - public Builder clearCluster() { - if (clusterBuilder_ == null) { - cluster_ = null; + public Builder clearWorkers() { + if (workersBuilder_ == null) { + workers_ = null; onChanged(); } else { - cluster_ = null; - clusterBuilder_ = null; + workers_ = null; + workersBuilder_ = null; } return this; } /** *
-       * Cluster
+       * Spec of the default worker group.
        * 
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - public flyteidl.plugins.Dask.DaskCluster.Builder getClusterBuilder() { + public flyteidl.plugins.Dask.WorkerGroup.Builder getWorkersBuilder() { onChanged(); - return getClusterFieldBuilder().getBuilder(); + return getWorkersFieldBuilder().getBuilder(); } /** *
-       * Cluster
+       * Spec of the default worker group.
        * 
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ - public flyteidl.plugins.Dask.DaskClusterOrBuilder getClusterOrBuilder() { - if (clusterBuilder_ != null) { - return clusterBuilder_.getMessageOrBuilder(); + public flyteidl.plugins.Dask.WorkerGroupOrBuilder getWorkersOrBuilder() { + if (workersBuilder_ != null) { + return workersBuilder_.getMessageOrBuilder(); } else { - return cluster_ == null ? - flyteidl.plugins.Dask.DaskCluster.getDefaultInstance() : cluster_; + return workers_ == null ? + flyteidl.plugins.Dask.WorkerGroup.getDefaultInstance() : workers_; } } /** *
-       * Cluster
+       * Spec of the default worker group.
        * 
* - * .flyteidl.plugins.DaskCluster cluster = 2; + * .flyteidl.plugins.WorkerGroup workers = 2; */ private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.plugins.Dask.DaskCluster, flyteidl.plugins.Dask.DaskCluster.Builder, flyteidl.plugins.Dask.DaskClusterOrBuilder> - getClusterFieldBuilder() { - if (clusterBuilder_ == null) { - clusterBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.plugins.Dask.DaskCluster, flyteidl.plugins.Dask.DaskCluster.Builder, flyteidl.plugins.Dask.DaskClusterOrBuilder>( - getCluster(), + flyteidl.plugins.Dask.WorkerGroup, flyteidl.plugins.Dask.WorkerGroup.Builder, flyteidl.plugins.Dask.WorkerGroupOrBuilder> + getWorkersFieldBuilder() { + if (workersBuilder_ == null) { + workersBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.plugins.Dask.WorkerGroup, flyteidl.plugins.Dask.WorkerGroup.Builder, flyteidl.plugins.Dask.WorkerGroupOrBuilder>( + getWorkers(), getParentForChildren(), isClean()); - cluster_ = null; + workers_ = null; } - return clusterBuilder_; + return workersBuilder_; } @java.lang.Override public final Builder setUnknownFields( @@ -942,8 +942,8 @@ public flyteidl.plugins.Dask.DaskJob getDefaultInstanceForType() { } - public interface JobPodSpecOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.plugins.JobPodSpec) + public interface SchedulerOrBuilder extends + // @@protoc_insertion_point(interface_extends:flyteidl.plugins.Scheduler) com.google.protobuf.MessageOrBuilder { /** @@ -966,7 +966,7 @@ public interface JobPodSpecOrBuilder extends /** *
-     * Resources assigned to the job pod.
+     * Resources assigned to the scheduler pod.
      * 
* * .flyteidl.core.Resources resources = 2; @@ -974,7 +974,7 @@ public interface JobPodSpecOrBuilder extends boolean hasResources(); /** *
-     * Resources assigned to the job pod.
+     * Resources assigned to the scheduler pod.
      * 
* * .flyteidl.core.Resources resources = 2; @@ -982,7 +982,7 @@ public interface JobPodSpecOrBuilder extends flyteidl.core.Tasks.Resources getResources(); /** *
-     * Resources assigned to the job pod.
+     * Resources assigned to the scheduler pod.
      * 
* * .flyteidl.core.Resources resources = 2; @@ -991,21 +991,21 @@ public interface JobPodSpecOrBuilder extends } /** *
-   * Specification for the job pod
+   * Specification for the scheduler pod.
    * 
* - * Protobuf type {@code flyteidl.plugins.JobPodSpec} + * Protobuf type {@code flyteidl.plugins.Scheduler} */ - public static final class JobPodSpec extends + public static final class Scheduler extends com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.plugins.JobPodSpec) - JobPodSpecOrBuilder { + // @@protoc_insertion_point(message_implements:flyteidl.plugins.Scheduler) + SchedulerOrBuilder { private static final long serialVersionUID = 0L; - // Use JobPodSpec.newBuilder() to construct. - private JobPodSpec(com.google.protobuf.GeneratedMessageV3.Builder builder) { + // Use Scheduler.newBuilder() to construct. + private Scheduler(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } - private JobPodSpec() { + private Scheduler() { image_ = ""; } @@ -1014,7 +1014,7 @@ private JobPodSpec() { getUnknownFields() { return this.unknownFields; } - private JobPodSpec( + private Scheduler( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -1073,15 +1073,15 @@ private JobPodSpec( } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_JobPodSpec_descriptor; + return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_Scheduler_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_JobPodSpec_fieldAccessorTable + return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_Scheduler_fieldAccessorTable .ensureFieldAccessorsInitialized( - flyteidl.plugins.Dask.JobPodSpec.class, flyteidl.plugins.Dask.JobPodSpec.Builder.class); + flyteidl.plugins.Dask.Scheduler.class, flyteidl.plugins.Dask.Scheduler.Builder.class); } public static final int IMAGE_FIELD_NUMBER = 1; @@ -1130,7 +1130,7 @@ public java.lang.String getImage() { private flyteidl.core.Tasks.Resources resources_; /** *
-     * Resources assigned to the job pod.
+     * Resources assigned to the scheduler pod.
      * 
* * .flyteidl.core.Resources resources = 2; @@ -1140,7 +1140,7 @@ public boolean hasResources() { } /** *
-     * Resources assigned to the job pod.
+     * Resources assigned to the scheduler pod.
      * 
* * .flyteidl.core.Resources resources = 2; @@ -1150,7 +1150,7 @@ public flyteidl.core.Tasks.Resources getResources() { } /** *
-     * Resources assigned to the job pod.
+     * Resources assigned to the scheduler pod.
      * 
* * .flyteidl.core.Resources resources = 2; @@ -1205,10 +1205,10 @@ public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } - if (!(obj instanceof flyteidl.plugins.Dask.JobPodSpec)) { + if (!(obj instanceof flyteidl.plugins.Dask.Scheduler)) { return super.equals(obj); } - flyteidl.plugins.Dask.JobPodSpec other = (flyteidl.plugins.Dask.JobPodSpec) obj; + flyteidl.plugins.Dask.Scheduler other = (flyteidl.plugins.Dask.Scheduler) obj; if (!getImage() .equals(other.getImage())) return false; @@ -1239,69 +1239,69 @@ public int hashCode() { return hash; } - public static flyteidl.plugins.Dask.JobPodSpec parseFrom( + public static flyteidl.plugins.Dask.Scheduler parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.plugins.Dask.JobPodSpec parseFrom( + public static flyteidl.plugins.Dask.Scheduler parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.plugins.Dask.JobPodSpec parseFrom( + public static flyteidl.plugins.Dask.Scheduler parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.plugins.Dask.JobPodSpec parseFrom( + public static flyteidl.plugins.Dask.Scheduler parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.plugins.Dask.JobPodSpec parseFrom(byte[] data) + public static flyteidl.plugins.Dask.Scheduler parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.plugins.Dask.JobPodSpec parseFrom( + public static flyteidl.plugins.Dask.Scheduler parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.plugins.Dask.JobPodSpec parseFrom(java.io.InputStream input) + public static flyteidl.plugins.Dask.Scheduler parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static flyteidl.plugins.Dask.JobPodSpec parseFrom( + public static flyteidl.plugins.Dask.Scheduler parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } - public static flyteidl.plugins.Dask.JobPodSpec parseDelimitedFrom(java.io.InputStream input) + public static flyteidl.plugins.Dask.Scheduler parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } - public static flyteidl.plugins.Dask.JobPodSpec parseDelimitedFrom( + public static flyteidl.plugins.Dask.Scheduler parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, 
extensionRegistry); } - public static flyteidl.plugins.Dask.JobPodSpec parseFrom( + public static flyteidl.plugins.Dask.Scheduler parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static flyteidl.plugins.Dask.JobPodSpec parseFrom( + public static flyteidl.plugins.Dask.Scheduler parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -1314,7 +1314,7 @@ public static flyteidl.plugins.Dask.JobPodSpec parseFrom( public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(flyteidl.plugins.Dask.JobPodSpec prototype) { + public static Builder newBuilder(flyteidl.plugins.Dask.Scheduler prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override @@ -1331,29 +1331,29 @@ protected Builder newBuilderForType( } /** *
-     * Specification for the job pod
+     * Specification for the scheduler pod.
      * 
* - * Protobuf type {@code flyteidl.plugins.JobPodSpec} + * Protobuf type {@code flyteidl.plugins.Scheduler} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.plugins.JobPodSpec) - flyteidl.plugins.Dask.JobPodSpecOrBuilder { + // @@protoc_insertion_point(builder_implements:flyteidl.plugins.Scheduler) + flyteidl.plugins.Dask.SchedulerOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_JobPodSpec_descriptor; + return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_Scheduler_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_JobPodSpec_fieldAccessorTable + return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_Scheduler_fieldAccessorTable .ensureFieldAccessorsInitialized( - flyteidl.plugins.Dask.JobPodSpec.class, flyteidl.plugins.Dask.JobPodSpec.Builder.class); + flyteidl.plugins.Dask.Scheduler.class, flyteidl.plugins.Dask.Scheduler.Builder.class); } - // Construct using flyteidl.plugins.Dask.JobPodSpec.newBuilder() + // Construct using flyteidl.plugins.Dask.Scheduler.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -1385,17 +1385,17 @@ public Builder clear() { @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_JobPodSpec_descriptor; + return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_Scheduler_descriptor; } @java.lang.Override - public flyteidl.plugins.Dask.JobPodSpec getDefaultInstanceForType() { - return flyteidl.plugins.Dask.JobPodSpec.getDefaultInstance(); + public flyteidl.plugins.Dask.Scheduler getDefaultInstanceForType() { + return flyteidl.plugins.Dask.Scheduler.getDefaultInstance(); } @java.lang.Override - public flyteidl.plugins.Dask.JobPodSpec build() { - flyteidl.plugins.Dask.JobPodSpec result = buildPartial(); + public flyteidl.plugins.Dask.Scheduler build() { + flyteidl.plugins.Dask.Scheduler result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } @@ -1403,8 +1403,8 @@ public flyteidl.plugins.Dask.JobPodSpec build() { } @java.lang.Override - public flyteidl.plugins.Dask.JobPodSpec buildPartial() { - flyteidl.plugins.Dask.JobPodSpec result = new flyteidl.plugins.Dask.JobPodSpec(this); + public flyteidl.plugins.Dask.Scheduler buildPartial() { + flyteidl.plugins.Dask.Scheduler result = new flyteidl.plugins.Dask.Scheduler(this); result.image_ = image_; if (resourcesBuilder_ == null) { result.resources_ = resources_; @@ -1449,16 +1449,16 @@ public Builder addRepeatedField( } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.plugins.Dask.JobPodSpec) { - return mergeFrom((flyteidl.plugins.Dask.JobPodSpec)other); + if (other instanceof flyteidl.plugins.Dask.Scheduler) { + return mergeFrom((flyteidl.plugins.Dask.Scheduler)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(flyteidl.plugins.Dask.JobPodSpec other) { - if (other == flyteidl.plugins.Dask.JobPodSpec.getDefaultInstance()) return this; + public Builder mergeFrom(flyteidl.plugins.Dask.Scheduler other) { + if (other == flyteidl.plugins.Dask.Scheduler.getDefaultInstance()) 
return this; if (!other.getImage().isEmpty()) { image_ = other.image_; onChanged(); @@ -1481,11 +1481,11 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - flyteidl.plugins.Dask.JobPodSpec parsedMessage = null; + flyteidl.plugins.Dask.Scheduler parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.plugins.Dask.JobPodSpec) e.getUnfinishedMessage(); + parsedMessage = (flyteidl.plugins.Dask.Scheduler) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -1589,7 +1589,7 @@ public Builder setImageBytes( flyteidl.core.Tasks.Resources, flyteidl.core.Tasks.Resources.Builder, flyteidl.core.Tasks.ResourcesOrBuilder> resourcesBuilder_; /** *
-       * Resources assigned to the job pod.
+       * Resources assigned to the scheduler pod.
        * 
* * .flyteidl.core.Resources resources = 2; @@ -1599,7 +1599,7 @@ public boolean hasResources() { } /** *
-       * Resources assigned to the job pod.
+       * Resources assigned to the scheduler pod.
        * 
* * .flyteidl.core.Resources resources = 2; @@ -1613,7 +1613,7 @@ public flyteidl.core.Tasks.Resources getResources() { } /** *
-       * Resources assigned to the job pod.
+       * Resources assigned to the scheduler pod.
        * 
* * .flyteidl.core.Resources resources = 2; @@ -1633,7 +1633,7 @@ public Builder setResources(flyteidl.core.Tasks.Resources value) { } /** *
-       * Resources assigned to the job pod.
+       * Resources assigned to the scheduler pod.
        * 
* * .flyteidl.core.Resources resources = 2; @@ -1651,7 +1651,7 @@ public Builder setResources( } /** *
-       * Resources assigned to the job pod.
+       * Resources assigned to the scheduler pod.
        * 
* * .flyteidl.core.Resources resources = 2; @@ -1673,7 +1673,7 @@ public Builder mergeResources(flyteidl.core.Tasks.Resources value) { } /** *
-       * Resources assigned to the job pod.
+       * Resources assigned to the scheduler pod.
        * 
* * .flyteidl.core.Resources resources = 2; @@ -1691,7 +1691,7 @@ public Builder clearResources() { } /** *
-       * Resources assigned to the job pod.
+       * Resources assigned to the scheduler pod.
        * 
* * .flyteidl.core.Resources resources = 2; @@ -1703,7 +1703,7 @@ public flyteidl.core.Tasks.Resources.Builder getResourcesBuilder() { } /** *
-       * Resources assigned to the job pod.
+       * Resources assigned to the scheduler pod.
        * 
* * .flyteidl.core.Resources resources = 2; @@ -1718,7 +1718,7 @@ public flyteidl.core.Tasks.ResourcesOrBuilder getResourcesOrBuilder() { } /** *
-       * Resources assigned to the job pod.
+       * Resources assigned to the scheduler pod.
        * 
* * .flyteidl.core.Resources resources = 2; @@ -1749,82 +1749,80 @@ public final Builder mergeUnknownFields( } - // @@protoc_insertion_point(builder_scope:flyteidl.plugins.JobPodSpec) + // @@protoc_insertion_point(builder_scope:flyteidl.plugins.Scheduler) } - // @@protoc_insertion_point(class_scope:flyteidl.plugins.JobPodSpec) - private static final flyteidl.plugins.Dask.JobPodSpec DEFAULT_INSTANCE; + // @@protoc_insertion_point(class_scope:flyteidl.plugins.Scheduler) + private static final flyteidl.plugins.Dask.Scheduler DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new flyteidl.plugins.Dask.JobPodSpec(); + DEFAULT_INSTANCE = new flyteidl.plugins.Dask.Scheduler(); } - public static flyteidl.plugins.Dask.JobPodSpec getDefaultInstance() { + public static flyteidl.plugins.Dask.Scheduler getDefaultInstance() { return DEFAULT_INSTANCE; } - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override - public JobPodSpec parsePartialFrom( + public Scheduler parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new JobPodSpec(input, extensionRegistry); + return new Scheduler(input, extensionRegistry); } }; - public static com.google.protobuf.Parser parser() { + public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override - public flyteidl.plugins.Dask.JobPodSpec getDefaultInstanceForType() { + public flyteidl.plugins.Dask.Scheduler getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } - public interface DaskClusterOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.plugins.DaskCluster) + public interface WorkerGroupOrBuilder extends + // @@protoc_insertion_point(interface_extends:flyteidl.plugins.WorkerGroup) com.google.protobuf.MessageOrBuilder { /** *
-     * Optional image to use for the scheduler as well as the default worker group. If unset, will use 
-     * the default image.
+     * Number of workers in the group.
      * 
* - * string image = 1; + * uint32 number_of_workers = 1; */ - java.lang.String getImage(); + int getNumberOfWorkers(); + /** *
-     * Optional image to use for the scheduler as well as the default worker group. If unset, will use 
-     * the default image.
+     * Optional image to use for the pods of the worker group. If unset, will use the default image.
      * 
* - * string image = 1; + * string image = 2; */ - com.google.protobuf.ByteString - getImageBytes(); - + java.lang.String getImage(); /** *
-     * Number of workers in the default worker group
+     * Optional image to use for the pods of the worker group. If unset, will use the default image.
      * 
* - * int32 nWorkers = 2; + * string image = 2; */ - int getNWorkers(); + com.google.protobuf.ByteString + getImageBytes(); /** *
-     * Resources assigned to the scheduler as well as all pods of the default worker group.
+     * Resources assigned to all pods of the worker group.
      * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
      * it is advised to only set limits. If requests are not explicitly set, the plugin will make
      * sure to set requests==limits.
@@ -1836,7 +1834,7 @@ public interface DaskClusterOrBuilder extends
     boolean hasResources();
     /**
      * 
-     * Resources assigned to the scheduler as well as all pods of the default worker group.
+     * Resources assigned to all pods of the worker group.
      * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
      * it is advised to only set limits. If requests are not explicitly set, the plugin will make
      * sure to set requests==limits.
@@ -1848,7 +1846,7 @@ public interface DaskClusterOrBuilder extends
     flyteidl.core.Tasks.Resources getResources();
     /**
      * 
-     * Resources assigned to the scheduler as well as all pods of the default worker group.
+     * Resources assigned to all pods of the worker group.
      * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
      * it is advised to only set limits. If requests are not explicitly set, the plugin will make
      * sure to set requests==limits.
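
For orientation while reviewing the rename, a minimal read-side sketch of the accessors declared above; the class and variable names are illustrative and not part of this patch. It also shows the wire-format change that comes with the rename: the worker count is now uint32 number_of_workers = 1 and image moves to field 2, so payloads serialized with the old DaskCluster layout (image = 1, int32 nWorkers = 2) will not decode compatibly.

    // Sketch only: round-trips a WorkerGroup to show the renumbered fields.
    import com.google.protobuf.InvalidProtocolBufferException;
    import flyteidl.plugins.Dask;

    class WorkerGroupReadSketch {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        byte[] bytes = Dask.WorkerGroup.newBuilder()
            .setNumberOfWorkers(4)                    // uint32 field 1 (was int32 nWorkers = 2)
            .setImage("ghcr.io/dask/dask:2022.12.0")  // string field 2 (was field 1); image tag is illustrative
            .build()
            .toByteArray();

        Dask.WorkerGroup wg = Dask.WorkerGroup.parseFrom(bytes);
        System.out.println(wg.getNumberOfWorkers() + " workers, image=" + wg.getImage()
            + ", hasResources=" + wg.hasResources()); // message field 3, unchanged
      }
    }
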
@@ -1860,18 +1858,18 @@ public interface DaskClusterOrBuilder extends
     flyteidl.core.Tasks.ResourcesOrBuilder getResourcesOrBuilder();
   }
   /**
-   * Protobuf type {@code flyteidl.plugins.DaskCluster}
+   * Protobuf type {@code flyteidl.plugins.WorkerGroup}
    */
-  public  static final class DaskCluster extends
+  public  static final class WorkerGroup extends
       com.google.protobuf.GeneratedMessageV3 implements
-      // @@protoc_insertion_point(message_implements:flyteidl.plugins.DaskCluster)
-      DaskClusterOrBuilder {
+      // @@protoc_insertion_point(message_implements:flyteidl.plugins.WorkerGroup)
+      WorkerGroupOrBuilder {
   private static final long serialVersionUID = 0L;
-    // Use DaskCluster.newBuilder() to construct.
-    private DaskCluster(com.google.protobuf.GeneratedMessageV3.Builder builder) {
+    // Use WorkerGroup.newBuilder() to construct.
+    private WorkerGroup(com.google.protobuf.GeneratedMessageV3.Builder builder) {
       super(builder);
     }
-    private DaskCluster() {
+    private WorkerGroup() {
       image_ = "";
     }
 
@@ -1880,7 +1878,7 @@ private DaskCluster() {
     getUnknownFields() {
       return this.unknownFields;
     }
-    private DaskCluster(
+    private WorkerGroup(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
@@ -1899,15 +1897,15 @@ private DaskCluster(
             case 0:
               done = true;
               break;
-            case 10: {
-              java.lang.String s = input.readStringRequireUtf8();
+            case 8: {
 
-              image_ = s;
+              numberOfWorkers_ = input.readUInt32();
               break;
             }
-            case 16: {
+            case 18: {
+              java.lang.String s = input.readStringRequireUtf8();
 
-              nWorkers_ = input.readInt32();
+              image_ = s;
               break;
             }
             case 26: {
@@ -1944,26 +1942,38 @@ private DaskCluster(
     }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
-      return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_DaskCluster_descriptor;
+      return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_WorkerGroup_descriptor;
     }
 
     @java.lang.Override
     protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_DaskCluster_fieldAccessorTable
+      return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_WorkerGroup_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
-              flyteidl.plugins.Dask.DaskCluster.class, flyteidl.plugins.Dask.DaskCluster.Builder.class);
+              flyteidl.plugins.Dask.WorkerGroup.class, flyteidl.plugins.Dask.WorkerGroup.Builder.class);
     }
 
-    public static final int IMAGE_FIELD_NUMBER = 1;
+    public static final int NUMBER_OF_WORKERS_FIELD_NUMBER = 1;
+    private int numberOfWorkers_;
+    /**
+     * 
+     * Number of workers in the group.
+     * 
+ * + * uint32 number_of_workers = 1; + */ + public int getNumberOfWorkers() { + return numberOfWorkers_; + } + + public static final int IMAGE_FIELD_NUMBER = 2; private volatile java.lang.Object image_; /** *
-     * Optional image to use for the scheduler as well as the default worker group. If unset, will use 
-     * the default image.
+     * Optional image to use for the pods of the worker group. If unset, will use the default image.
      * 
* - * string image = 1; + * string image = 2; */ public java.lang.String getImage() { java.lang.Object ref = image_; @@ -1979,11 +1989,10 @@ public java.lang.String getImage() { } /** *
-     * Optional image to use for the scheduler as well as the default worker group. If unset, will use 
-     * the default image.
+     * Optional image to use for the pods of the worker group. If unset, will use the default image.
      * 
* - * string image = 1; + * string image = 2; */ public com.google.protobuf.ByteString getImageBytes() { @@ -1999,24 +2008,11 @@ public java.lang.String getImage() { } } - public static final int NWORKERS_FIELD_NUMBER = 2; - private int nWorkers_; - /** - *
-     * Number of workers in the default worker group
-     * 
- * - * int32 nWorkers = 2; - */ - public int getNWorkers() { - return nWorkers_; - } - public static final int RESOURCES_FIELD_NUMBER = 3; private flyteidl.core.Tasks.Resources resources_; /** *
-     * Resources assigned to the scheduler as well as all pods of the default worker group.
+     * Resources assigned to all pods of the worker group.
      * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
      * it is advised to only set limits. If requests are not explicitly set, the plugin will make
      * sure to set requests==limits.
@@ -2030,7 +2026,7 @@ public boolean hasResources() {
     }
     /**
      * 
-     * Resources assigned to the scheduler as well as all pods of the default worker group.
+     * Resources assigned to all pods of the worker group.
      * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
      * it is advised to only set limits. If requests are not explicitly set, the plugin will make
      * sure to set requests==limits.
@@ -2044,7 +2040,7 @@ public flyteidl.core.Tasks.Resources getResources() {
     }
     /**
      * 
-     * Resources assigned to the scheduler as well as all pods of the default worker group.
+     * Resources assigned to all pods of the worker group.
      * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
      * it is advised to only set limits. If requests are not explicitly set, the plugin will make
      * sure to set requests==limits.
@@ -2071,11 +2067,11 @@ public final boolean isInitialized() {
     @java.lang.Override
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
-      if (!getImageBytes().isEmpty()) {
-        com.google.protobuf.GeneratedMessageV3.writeString(output, 1, image_);
+      if (numberOfWorkers_ != 0) {
+        output.writeUInt32(1, numberOfWorkers_);
       }
-      if (nWorkers_ != 0) {
-        output.writeInt32(2, nWorkers_);
+      if (!getImageBytes().isEmpty()) {
+        com.google.protobuf.GeneratedMessageV3.writeString(output, 2, image_);
       }
       if (resources_ != null) {
         output.writeMessage(3, getResources());
@@ -2089,12 +2085,12 @@ public int getSerializedSize() {
       if (size != -1) return size;
 
       size = 0;
-      if (!getImageBytes().isEmpty()) {
-        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, image_);
-      }
-      if (nWorkers_ != 0) {
+      if (numberOfWorkers_ != 0) {
         size += com.google.protobuf.CodedOutputStream
-          .computeInt32Size(2, nWorkers_);
+          .computeUInt32Size(1, numberOfWorkers_);
+      }
+      if (!getImageBytes().isEmpty()) {
+        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, image_);
       }
       if (resources_ != null) {
         size += com.google.protobuf.CodedOutputStream
@@ -2110,15 +2106,15 @@ public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
       }
-      if (!(obj instanceof flyteidl.plugins.Dask.DaskCluster)) {
+      if (!(obj instanceof flyteidl.plugins.Dask.WorkerGroup)) {
         return super.equals(obj);
       }
-      flyteidl.plugins.Dask.DaskCluster other = (flyteidl.plugins.Dask.DaskCluster) obj;
+      flyteidl.plugins.Dask.WorkerGroup other = (flyteidl.plugins.Dask.WorkerGroup) obj;
 
+      if (getNumberOfWorkers()
+          != other.getNumberOfWorkers()) return false;
       if (!getImage()
           .equals(other.getImage())) return false;
-      if (getNWorkers()
-          != other.getNWorkers()) return false;
       if (hasResources() != other.hasResources()) return false;
       if (hasResources()) {
         if (!getResources()
@@ -2135,10 +2131,10 @@ public int hashCode() {
       }
       int hash = 41;
       hash = (19 * hash) + getDescriptor().hashCode();
+      hash = (37 * hash) + NUMBER_OF_WORKERS_FIELD_NUMBER;
+      hash = (53 * hash) + getNumberOfWorkers();
       hash = (37 * hash) + IMAGE_FIELD_NUMBER;
       hash = (53 * hash) + getImage().hashCode();
-      hash = (37 * hash) + NWORKERS_FIELD_NUMBER;
-      hash = (53 * hash) + getNWorkers();
       if (hasResources()) {
         hash = (37 * hash) + RESOURCES_FIELD_NUMBER;
         hash = (53 * hash) + getResources().hashCode();
@@ -2148,69 +2144,69 @@ public int hashCode() {
       return hash;
     }
 
-    public static flyteidl.plugins.Dask.DaskCluster parseFrom(
+    public static flyteidl.plugins.Dask.WorkerGroup parseFrom(
         java.nio.ByteBuffer data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static flyteidl.plugins.Dask.DaskCluster parseFrom(
+    public static flyteidl.plugins.Dask.WorkerGroup parseFrom(
         java.nio.ByteBuffer data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static flyteidl.plugins.Dask.DaskCluster parseFrom(
+    public static flyteidl.plugins.Dask.WorkerGroup parseFrom(
         com.google.protobuf.ByteString data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static flyteidl.plugins.Dask.DaskCluster parseFrom(
+    public static flyteidl.plugins.Dask.WorkerGroup parseFrom(
         com.google.protobuf.ByteString data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static flyteidl.plugins.Dask.DaskCluster parseFrom(byte[] data)
+    public static flyteidl.plugins.Dask.WorkerGroup parseFrom(byte[] data)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data);
     }
-    public static flyteidl.plugins.Dask.DaskCluster parseFrom(
+    public static flyteidl.plugins.Dask.WorkerGroup parseFrom(
         byte[] data,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
       return PARSER.parseFrom(data, extensionRegistry);
     }
-    public static flyteidl.plugins.Dask.DaskCluster parseFrom(java.io.InputStream input)
+    public static flyteidl.plugins.Dask.WorkerGroup parseFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessageV3
           .parseWithIOException(PARSER, input);
     }
-    public static flyteidl.plugins.Dask.DaskCluster parseFrom(
+    public static flyteidl.plugins.Dask.WorkerGroup parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessageV3
           .parseWithIOException(PARSER, input, extensionRegistry);
     }
-    public static flyteidl.plugins.Dask.DaskCluster parseDelimitedFrom(java.io.InputStream input)
+    public static flyteidl.plugins.Dask.WorkerGroup parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessageV3
           .parseDelimitedWithIOException(PARSER, input);
     }
-    public static flyteidl.plugins.Dask.DaskCluster parseDelimitedFrom(
+    public static flyteidl.plugins.Dask.WorkerGroup parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessageV3
           .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
-    public static flyteidl.plugins.Dask.DaskCluster parseFrom(
+    public static flyteidl.plugins.Dask.WorkerGroup parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
       return com.google.protobuf.GeneratedMessageV3
           .parseWithIOException(PARSER, input);
     }
-    public static flyteidl.plugins.Dask.DaskCluster parseFrom(
+    public static flyteidl.plugins.Dask.WorkerGroup parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
@@ -2223,7 +2219,7 @@ public static flyteidl.plugins.Dask.DaskCluster parseFrom(
     public static Builder newBuilder() {
       return DEFAULT_INSTANCE.toBuilder();
     }
-    public static Builder newBuilder(flyteidl.plugins.Dask.DaskCluster prototype) {
+    public static Builder newBuilder(flyteidl.plugins.Dask.WorkerGroup prototype) {
       return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
     }
     @java.lang.Override
@@ -2239,26 +2235,26 @@ protected Builder newBuilderForType(
       return builder;
     }
     /**
-     * Protobuf type {@code flyteidl.plugins.DaskCluster}
+     * Protobuf type {@code flyteidl.plugins.WorkerGroup}
      */
     public static final class Builder extends
         com.google.protobuf.GeneratedMessageV3.Builder implements
-        // @@protoc_insertion_point(builder_implements:flyteidl.plugins.DaskCluster)
-        flyteidl.plugins.Dask.DaskClusterOrBuilder {
+        // @@protoc_insertion_point(builder_implements:flyteidl.plugins.WorkerGroup)
+        flyteidl.plugins.Dask.WorkerGroupOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
-        return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_DaskCluster_descriptor;
+        return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_WorkerGroup_descriptor;
       }
 
       @java.lang.Override
       protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_DaskCluster_fieldAccessorTable
+        return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_WorkerGroup_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
-                flyteidl.plugins.Dask.DaskCluster.class, flyteidl.plugins.Dask.DaskCluster.Builder.class);
+                flyteidl.plugins.Dask.WorkerGroup.class, flyteidl.plugins.Dask.WorkerGroup.Builder.class);
       }
 
-      // Construct using flyteidl.plugins.Dask.DaskCluster.newBuilder()
+      // Construct using flyteidl.plugins.Dask.WorkerGroup.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
@@ -2276,9 +2272,9 @@ private void maybeForceBuilderInitialization() {
       @java.lang.Override
       public Builder clear() {
         super.clear();
-        image_ = "";
+        numberOfWorkers_ = 0;
 
-        nWorkers_ = 0;
+        image_ = "";
 
         if (resourcesBuilder_ == null) {
           resources_ = null;
@@ -2292,17 +2288,17 @@ public Builder clear() {
       @java.lang.Override
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_DaskCluster_descriptor;
+        return flyteidl.plugins.Dask.internal_static_flyteidl_plugins_WorkerGroup_descriptor;
       }
 
       @java.lang.Override
-      public flyteidl.plugins.Dask.DaskCluster getDefaultInstanceForType() {
-        return flyteidl.plugins.Dask.DaskCluster.getDefaultInstance();
+      public flyteidl.plugins.Dask.WorkerGroup getDefaultInstanceForType() {
+        return flyteidl.plugins.Dask.WorkerGroup.getDefaultInstance();
       }
 
       @java.lang.Override
-      public flyteidl.plugins.Dask.DaskCluster build() {
-        flyteidl.plugins.Dask.DaskCluster result = buildPartial();
+      public flyteidl.plugins.Dask.WorkerGroup build() {
+        flyteidl.plugins.Dask.WorkerGroup result = buildPartial();
         if (!result.isInitialized()) {
           throw newUninitializedMessageException(result);
         }
@@ -2310,10 +2306,10 @@ public flyteidl.plugins.Dask.DaskCluster build() {
       }
 
       @java.lang.Override
-      public flyteidl.plugins.Dask.DaskCluster buildPartial() {
-        flyteidl.plugins.Dask.DaskCluster result = new flyteidl.plugins.Dask.DaskCluster(this);
+      public flyteidl.plugins.Dask.WorkerGroup buildPartial() {
+        flyteidl.plugins.Dask.WorkerGroup result = new flyteidl.plugins.Dask.WorkerGroup(this);
+        result.numberOfWorkers_ = numberOfWorkers_;
         result.image_ = image_;
-        result.nWorkers_ = nWorkers_;
         if (resourcesBuilder_ == null) {
           result.resources_ = resources_;
         } else {
@@ -2357,23 +2353,23 @@ public Builder addRepeatedField(
       }
       @java.lang.Override
       public Builder mergeFrom(com.google.protobuf.Message other) {
-        if (other instanceof flyteidl.plugins.Dask.DaskCluster) {
-          return mergeFrom((flyteidl.plugins.Dask.DaskCluster)other);
+        if (other instanceof flyteidl.plugins.Dask.WorkerGroup) {
+          return mergeFrom((flyteidl.plugins.Dask.WorkerGroup)other);
         } else {
           super.mergeFrom(other);
           return this;
         }
       }
 
-      public Builder mergeFrom(flyteidl.plugins.Dask.DaskCluster other) {
-        if (other == flyteidl.plugins.Dask.DaskCluster.getDefaultInstance()) return this;
+      public Builder mergeFrom(flyteidl.plugins.Dask.WorkerGroup other) {
+        if (other == flyteidl.plugins.Dask.WorkerGroup.getDefaultInstance()) return this;
+        if (other.getNumberOfWorkers() != 0) {
+          setNumberOfWorkers(other.getNumberOfWorkers());
+        }
         if (!other.getImage().isEmpty()) {
           image_ = other.image_;
           onChanged();
         }
-        if (other.getNWorkers() != 0) {
-          setNWorkers(other.getNWorkers());
-        }
         if (other.hasResources()) {
           mergeResources(other.getResources());
         }
@@ -2392,11 +2388,11 @@ public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        flyteidl.plugins.Dask.DaskCluster parsedMessage = null;
+        flyteidl.plugins.Dask.WorkerGroup parsedMessage = null;
         try {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (flyteidl.plugins.Dask.DaskCluster) e.getUnfinishedMessage();
+          parsedMessage = (flyteidl.plugins.Dask.WorkerGroup) e.getUnfinishedMessage();
           throw e.unwrapIOException();
         } finally {
           if (parsedMessage != null) {
@@ -2406,14 +2402,51 @@ public Builder mergeFrom(
         return this;
       }
 
+      private int numberOfWorkers_ ;
+      /**
+       * 
+       * Number of workers in the group.
+       * 
+ * + * uint32 number_of_workers = 1; + */ + public int getNumberOfWorkers() { + return numberOfWorkers_; + } + /** + *
+       * Number of workers in the group.
+       * 
+ * + * uint32 number_of_workers = 1; + */ + public Builder setNumberOfWorkers(int value) { + + numberOfWorkers_ = value; + onChanged(); + return this; + } + /** + *
+       * Number of workers in the group.
+       * 
+ * + * uint32 number_of_workers = 1; + */ + public Builder clearNumberOfWorkers() { + + numberOfWorkers_ = 0; + onChanged(); + return this; + } + private java.lang.Object image_ = ""; /** *
-       * Optional image to use for the scheduler as well as the default worker group. If unset, will use 
-       * the default image.
+       * Optional image to use for the pods of the worker group. If unset, will use the default image.
        * 
* - * string image = 1; + * string image = 2; */ public java.lang.String getImage() { java.lang.Object ref = image_; @@ -2429,11 +2462,10 @@ public java.lang.String getImage() { } /** *
-       * Optional image to use for the scheduler as well as the default worker group. If unset, will use 
-       * the default image.
+       * Optional image to use for the pods of the worker group. If unset, will use the default image.
        * 
* - * string image = 1; + * string image = 2; */ public com.google.protobuf.ByteString getImageBytes() { @@ -2450,11 +2482,10 @@ public java.lang.String getImage() { } /** *
-       * Optional image to use for the scheduler as well as the default worker group. If unset, will use 
-       * the default image.
+       * Optional image to use for the pods of the worker group. If unset, will use the default image.
        * 
* - * string image = 1; + * string image = 2; */ public Builder setImage( java.lang.String value) { @@ -2468,11 +2499,10 @@ public Builder setImage( } /** *
-       * Optional image to use for the scheduler as well as the default worker group. If unset, will use 
-       * the default image.
+       * Optional image to use for the pods of the worker group. If unset, will use the default image.
        * 
* - * string image = 1; + * string image = 2; */ public Builder clearImage() { @@ -2482,11 +2512,10 @@ public Builder clearImage() { } /** *
-       * Optional image to use for the scheduler as well as the default worker group. If unset, will use 
-       * the default image.
+       * Optional image to use for the pods of the worker group. If unset, will use the default image.
        * 
* - * string image = 1; + * string image = 2; */ public Builder setImageBytes( com.google.protobuf.ByteString value) { @@ -2500,50 +2529,12 @@ public Builder setImageBytes( return this; } - private int nWorkers_ ; - /** - *
-       * Number of workers in the default worker group
-       * 
- * - * int32 nWorkers = 2; - */ - public int getNWorkers() { - return nWorkers_; - } - /** - *
-       * Number of workers in the default worker group
-       * 
- * - * int32 nWorkers = 2; - */ - public Builder setNWorkers(int value) { - - nWorkers_ = value; - onChanged(); - return this; - } - /** - *
-       * Number of workers in the default worker group
-       * 
- * - * int32 nWorkers = 2; - */ - public Builder clearNWorkers() { - - nWorkers_ = 0; - onChanged(); - return this; - } - private flyteidl.core.Tasks.Resources resources_; private com.google.protobuf.SingleFieldBuilderV3< flyteidl.core.Tasks.Resources, flyteidl.core.Tasks.Resources.Builder, flyteidl.core.Tasks.ResourcesOrBuilder> resourcesBuilder_; /** *
-       * Resources assigned to the scheduler as well as all pods of the default worker group.
+       * Resources assigned to all pods of the worker group.
        * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
        * it is advised to only set limits. If requests are not explicitly set, the plugin will make
        * sure to set requests==limits.
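
The note above recommends setting only limits on worker resources. A minimal sketch of what that looks like through the regenerated builder, assuming the ResourceEntry/limits shape of flyteidl/core/tasks.proto (which is not part of this hunk); the CPU and memory values are illustrative:

    // Sketch only: a limits-only Resources message attached to a WorkerGroup,
    // following the best-practice note above (requests are left unset so the
    // plugin can set requests == limits).
    import flyteidl.core.Tasks;
    import flyteidl.plugins.Dask;

    class WorkerResourcesSketch {
      static Dask.WorkerGroup limitsOnlyWorkers() {
        Tasks.Resources limitsOnly = Tasks.Resources.newBuilder()
            .addLimits(Tasks.Resources.ResourceEntry.newBuilder()
                .setName(Tasks.Resources.ResourceName.CPU)
                .setValue("2"))
            .addLimits(Tasks.Resources.ResourceEntry.newBuilder()
                .setName(Tasks.Resources.ResourceName.MEMORY)
                .setValue("8Gi"))
            .build();

        return Dask.WorkerGroup.newBuilder()
            .setNumberOfWorkers(10)
            .setResources(limitsOnly)
            .build();
      }
    }
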
@@ -2557,7 +2548,7 @@ public boolean hasResources() {
       }
       /**
        * 
-       * Resources assigned to the scheduler as well as all pods of the default worker group.
+       * Resources assigned to all pods of the worker group.
        * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
        * it is advised to only set limits. If requests are not explicitly set, the plugin will make
        * sure to set requests==limits.
@@ -2575,7 +2566,7 @@ public flyteidl.core.Tasks.Resources getResources() {
       }
       /**
        * 
-       * Resources assigned to the scheduler as well as all pods of the default worker group.
+       * Resources assigned to all pods of the worker group.
        * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
        * it is advised to only set limits. If requests are not explicitly set, the plugin will make
        * sure to set requests==limits.
@@ -2599,7 +2590,7 @@ public Builder setResources(flyteidl.core.Tasks.Resources value) {
       }
       /**
        * 
-       * Resources assigned to the scheduler as well as all pods of the default worker group.
+       * Resources assigned to all pods of the worker group.
        * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
        * it is advised to only set limits. If requests are not explicitly set, the plugin will make
        * sure to set requests==limits.
@@ -2621,7 +2612,7 @@ public Builder setResources(
       }
       /**
        * 
-       * Resources assigned to the scheduler as well as all pods of the default worker group.
+       * Resources assigned to all pods of the worker group.
        * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
        * it is advised to only set limits. If requests are not explicitly set, the plugin will make
        * sure to set requests==limits.
@@ -2647,7 +2638,7 @@ public Builder mergeResources(flyteidl.core.Tasks.Resources value) {
       }
       /**
        * 
-       * Resources assigned to the scheduler as well as all pods of the default worker group.
+       * Resources assigned to all pods of the worker group.
        * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
        * it is advised to only set limits. If requests are not explicitly set, the plugin will make
        * sure to set requests==limits.
@@ -2669,7 +2660,7 @@ public Builder clearResources() {
       }
       /**
        * 
-       * Resources assigned to the scheduler as well as all pods of the default worker group.
+       * Resources assigned to all pods of the worker group.
        * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
        * it is advised to only set limits. If requests are not explicitly set, the plugin will make
        * sure to set requests==limits.
@@ -2685,7 +2676,7 @@ public flyteidl.core.Tasks.Resources.Builder getResourcesBuilder() {
       }
       /**
        * 
-       * Resources assigned to the scheduler as well as all pods of the default worker group.
+       * Resources assigned to all pods of the worker group.
        * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
        * it is advised to only set limits. If requests are not explicitly set, the plugin will make
        * sure to set requests==limits.
@@ -2704,7 +2695,7 @@ public flyteidl.core.Tasks.ResourcesOrBuilder getResourcesOrBuilder() {
       }
       /**
        * 
-       * Resources assigned to the scheduler as well as all pods of the default worker group.
+       * Resources assigned to all pods of the worker group.
        * As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
        * it is advised to only set limits. If requests are not explicitly set, the plugin will make
        * sure to set requests==limits.
@@ -2739,41 +2730,41 @@ public final Builder mergeUnknownFields(
       }
 
 
-      // @@protoc_insertion_point(builder_scope:flyteidl.plugins.DaskCluster)
+      // @@protoc_insertion_point(builder_scope:flyteidl.plugins.WorkerGroup)
     }
 
-    // @@protoc_insertion_point(class_scope:flyteidl.plugins.DaskCluster)
-    private static final flyteidl.plugins.Dask.DaskCluster DEFAULT_INSTANCE;
+    // @@protoc_insertion_point(class_scope:flyteidl.plugins.WorkerGroup)
+    private static final flyteidl.plugins.Dask.WorkerGroup DEFAULT_INSTANCE;
     static {
-      DEFAULT_INSTANCE = new flyteidl.plugins.Dask.DaskCluster();
+      DEFAULT_INSTANCE = new flyteidl.plugins.Dask.WorkerGroup();
     }
 
-    public static flyteidl.plugins.Dask.DaskCluster getDefaultInstance() {
+    public static flyteidl.plugins.Dask.WorkerGroup getDefaultInstance() {
       return DEFAULT_INSTANCE;
     }
 
-    private static final com.google.protobuf.Parser
-        PARSER = new com.google.protobuf.AbstractParser() {
+    private static final com.google.protobuf.Parser
+        PARSER = new com.google.protobuf.AbstractParser() {
       @java.lang.Override
-      public DaskCluster parsePartialFrom(
+      public WorkerGroup parsePartialFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws com.google.protobuf.InvalidProtocolBufferException {
-        return new DaskCluster(input, extensionRegistry);
+        return new WorkerGroup(input, extensionRegistry);
       }
     };
 
-    public static com.google.protobuf.Parser parser() {
+    public static com.google.protobuf.Parser parser() {
       return PARSER;
     }
 
     @java.lang.Override
-    public com.google.protobuf.Parser getParserForType() {
+    public com.google.protobuf.Parser getParserForType() {
       return PARSER;
     }
 
     @java.lang.Override
-    public flyteidl.plugins.Dask.DaskCluster getDefaultInstanceForType() {
+    public flyteidl.plugins.Dask.WorkerGroup getDefaultInstanceForType() {
       return DEFAULT_INSTANCE;
     }
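
Putting the renamed messages together, a minimal sketch of assembling the enclosing DaskJob with the builders regenerated in this patch; the image reference is illustrative and both resources fields are left at their defaults:

    // Sketch only: a DaskJob built from the renamed Scheduler and WorkerGroup messages.
    import flyteidl.plugins.Dask;

    class DaskJobSketch {
      static Dask.DaskJob exampleJob() {
        Dask.Scheduler scheduler = Dask.Scheduler.newBuilder()
            .setImage("ghcr.io/dask/dask:2022.12.0")  // illustrative image reference
            .build();                                 // resources (field 2) left unset

        Dask.WorkerGroup workers = Dask.WorkerGroup.newBuilder()
            .setNumberOfWorkers(25)
            .setImage("ghcr.io/dask/dask:2022.12.0")
            .build();

        return Dask.DaskJob.newBuilder()
            .setScheduler(scheduler)   // was setJobPodSpec(...)
            .setWorkers(workers)       // was setCluster(...)
            .build();
      }
    }
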
 
@@ -2785,15 +2776,15 @@ public flyteidl.plugins.Dask.DaskCluster getDefaultInstanceForType() {
     com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
       internal_static_flyteidl_plugins_DaskJob_fieldAccessorTable;
   private static final com.google.protobuf.Descriptors.Descriptor
-    internal_static_flyteidl_plugins_JobPodSpec_descriptor;
+    internal_static_flyteidl_plugins_Scheduler_descriptor;
   private static final 
     com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-      internal_static_flyteidl_plugins_JobPodSpec_fieldAccessorTable;
+      internal_static_flyteidl_plugins_Scheduler_fieldAccessorTable;
   private static final com.google.protobuf.Descriptors.Descriptor
-    internal_static_flyteidl_plugins_DaskCluster_descriptor;
+    internal_static_flyteidl_plugins_WorkerGroup_descriptor;
   private static final 
     com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
-      internal_static_flyteidl_plugins_DaskCluster_fieldAccessorTable;
+      internal_static_flyteidl_plugins_WorkerGroup_fieldAccessorTable;
 
   public static com.google.protobuf.Descriptors.FileDescriptor
       getDescriptor() {
@@ -2804,16 +2795,16 @@ public flyteidl.plugins.Dask.DaskCluster getDefaultInstanceForType() {
   static {
     java.lang.String[] descriptorData = {
       "\n\033flyteidl/plugins/dask.proto\022\020flyteidl." +
-      "plugins\032\031flyteidl/core/tasks.proto\"k\n\007Da" +
-      "skJob\0220\n\njobPodSpec\030\001 \001(\0132\034.flyteidl.plu" +
-      "gins.JobPodSpec\022.\n\007cluster\030\002 \001(\0132\035.flyte" +
-      "idl.plugins.DaskCluster\"H\n\nJobPodSpec\022\r\n" +
-      "\005image\030\001 \001(\t\022+\n\tresources\030\002 \001(\0132\030.flytei" +
-      "dl.core.Resources\"[\n\013DaskCluster\022\r\n\005imag" +
-      "e\030\001 \001(\t\022\020\n\010nWorkers\030\002 \001(\005\022+\n\tresources\030\003" +
-      " \001(\0132\030.flyteidl.core.ResourcesB9Z7github" +
-      ".com/flyteorg/flyteidl/gen/pb-go/flyteid" +
-      "l/pluginsb\006proto3"
+      "plugins\032\031flyteidl/core/tasks.proto\"i\n\007Da" +
+      "skJob\022.\n\tscheduler\030\001 \001(\0132\033.flyteidl.plug" +
+      "ins.Scheduler\022.\n\007workers\030\002 \001(\0132\035.flyteid" +
+      "l.plugins.WorkerGroup\"G\n\tScheduler\022\r\n\005im" +
+      "age\030\001 \001(\t\022+\n\tresources\030\002 \001(\0132\030.flyteidl." +
+      "core.Resources\"d\n\013WorkerGroup\022\031\n\021number_" +
+      "of_workers\030\001 \001(\r\022\r\n\005image\030\002 \001(\t\022+\n\tresou" +
+      "rces\030\003 \001(\0132\030.flyteidl.core.ResourcesB9Z7" +
+      "github.com/flyteorg/flyteidl/gen/pb-go/f" +
+      "lyteidl/pluginsb\006proto3"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
         new com.google.protobuf.Descriptors.FileDescriptor.    InternalDescriptorAssigner() {
@@ -2833,19 +2824,19 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors(
     internal_static_flyteidl_plugins_DaskJob_fieldAccessorTable = new
       com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_flyteidl_plugins_DaskJob_descriptor,
-        new java.lang.String[] { "JobPodSpec", "Cluster", });
-    internal_static_flyteidl_plugins_JobPodSpec_descriptor =
+        new java.lang.String[] { "Scheduler", "Workers", });
+    internal_static_flyteidl_plugins_Scheduler_descriptor =
       getDescriptor().getMessageTypes().get(1);
-    internal_static_flyteidl_plugins_JobPodSpec_fieldAccessorTable = new
+    internal_static_flyteidl_plugins_Scheduler_fieldAccessorTable = new
       com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
-        internal_static_flyteidl_plugins_JobPodSpec_descriptor,
+        internal_static_flyteidl_plugins_Scheduler_descriptor,
         new java.lang.String[] { "Image", "Resources", });
-    internal_static_flyteidl_plugins_DaskCluster_descriptor =
+    internal_static_flyteidl_plugins_WorkerGroup_descriptor =
       getDescriptor().getMessageTypes().get(2);
-    internal_static_flyteidl_plugins_DaskCluster_fieldAccessorTable = new
+    internal_static_flyteidl_plugins_WorkerGroup_fieldAccessorTable = new
       com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
-        internal_static_flyteidl_plugins_DaskCluster_descriptor,
-        new java.lang.String[] { "Image", "NWorkers", "Resources", });
+        internal_static_flyteidl_plugins_WorkerGroup_descriptor,
+        new java.lang.String[] { "NumberOfWorkers", "Image", "Resources", });
     flyteidl.core.Tasks.getDescriptor();
   }
 
diff --git a/gen/pb_python/flyteidl/plugins/dask_pb2.py b/gen/pb_python/flyteidl/plugins/dask_pb2.py
index c93faf845..7c530787f 100644
--- a/gen/pb_python/flyteidl/plugins/dask_pb2.py
+++ b/gen/pb_python/flyteidl/plugins/dask_pb2.py
@@ -14,7 +14,7 @@
 from flyteidl.core import tasks_pb2 as flyteidl_dot_core_dot_tasks__pb2
 
 
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66lyteidl/plugins/dask.proto\x12\x10\x66lyteidl.plugins\x1a\x19\x66lyteidl/core/tasks.proto\"\x80\x01\n\x07\x44\x61skJob\x12<\n\njobPodSpec\x18\x01 \x01(\x0b\x32\x1c.flyteidl.plugins.JobPodSpecR\njobPodSpec\x12\x37\n\x07\x63luster\x18\x02 \x01(\x0b\x32\x1d.flyteidl.plugins.DaskClusterR\x07\x63luster\"Z\n\nJobPodSpec\x12\x14\n\x05image\x18\x01 \x01(\tR\x05image\x12\x36\n\tresources\x18\x02 \x01(\x0b\x32\x18.flyteidl.core.ResourcesR\tresources\"w\n\x0b\x44\x61skCluster\x12\x14\n\x05image\x18\x01 \x01(\tR\x05image\x12\x1a\n\x08nWorkers\x18\x02 \x01(\x05R\x08nWorkers\x12\x36\n\tresources\x18\x03 \x01(\x0b\x32\x18.flyteidl.core.ResourcesR\tresourcesB\xbb\x01\n\x14\x63om.flyteidl.pluginsB\tDaskProtoP\x01Z7github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/plugins\xa2\x02\x03\x46PX\xaa\x02\x10\x46lyteidl.Plugins\xca\x02\x10\x46lyteidl\\Plugins\xe2\x02\x1c\x46lyteidl\\Plugins\\GPBMetadata\xea\x02\x11\x46lyteidl::Pluginsb\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66lyteidl/plugins/dask.proto\x12\x10\x66lyteidl.plugins\x1a\x19\x66lyteidl/core/tasks.proto\"}\n\x07\x44\x61skJob\x12\x39\n\tscheduler\x18\x01 \x01(\x0b\x32\x1b.flyteidl.plugins.SchedulerR\tscheduler\x12\x37\n\x07workers\x18\x02 \x01(\x0b\x32\x1d.flyteidl.plugins.WorkerGroupR\x07workers\"Y\n\tScheduler\x12\x14\n\x05image\x18\x01 \x01(\tR\x05image\x12\x36\n\tresources\x18\x02 \x01(\x0b\x32\x18.flyteidl.core.ResourcesR\tresources\"\x87\x01\n\x0bWorkerGroup\x12*\n\x11number_of_workers\x18\x01 \x01(\rR\x0fnumberOfWorkers\x12\x14\n\x05image\x18\x02 \x01(\tR\x05image\x12\x36\n\tresources\x18\x03 \x01(\x0b\x32\x18.flyteidl.core.ResourcesR\tresourcesB\xbb\x01\n\x14\x63om.flyteidl.pluginsB\tDaskProtoP\x01Z7github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/plugins\xa2\x02\x03\x46PX\xaa\x02\x10\x46lyteidl.Plugins\xca\x02\x10\x46lyteidl\\Plugins\xe2\x02\x1c\x46lyteidl\\Plugins\\GPBMetadata\xea\x02\x11\x46lyteidl::Pluginsb\x06proto3')
 
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
 _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'flyteidl.plugins.dask_pb2', globals())
@@ -22,10 +22,10 @@
 
   DESCRIPTOR._options = None
   DESCRIPTOR._serialized_options = b'\n\024com.flyteidl.pluginsB\tDaskProtoP\001Z7github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/plugins\242\002\003FPX\252\002\020Flyteidl.Plugins\312\002\020Flyteidl\\Plugins\342\002\034Flyteidl\\Plugins\\GPBMetadata\352\002\021Flyteidl::Plugins'
-  _DASKJOB._serialized_start=77
-  _DASKJOB._serialized_end=205
-  _JOBPODSPEC._serialized_start=207
-  _JOBPODSPEC._serialized_end=297
-  _DASKCLUSTER._serialized_start=299
-  _DASKCLUSTER._serialized_end=418
+  _DASKJOB._serialized_start=76
+  _DASKJOB._serialized_end=201
+  _SCHEDULER._serialized_start=203
+  _SCHEDULER._serialized_end=292
+  _WORKERGROUP._serialized_start=295
+  _WORKERGROUP._serialized_end=430
 # @@protoc_insertion_point(module_scope)
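
For reference, a minimal round-trip sketch against the regenerated Python module. This is a hypothetical usage example, not part of the patch: it assumes the flyteidl Python package built from these generated files is importable, and the image names are placeholders.

from flyteidl.plugins import dask_pb2

# Build a DaskJob using the renamed messages and fields introduced in this change.
job = dask_pb2.DaskJob(
    scheduler=dask_pb2.Scheduler(image="dask-scheduler:example"),  # placeholder image
    workers=dask_pb2.WorkerGroup(number_of_workers=4, image="dask-worker:example"),  # placeholder image
)

# Serialize and parse back to confirm the renamed fields round-trip on the wire.
payload = job.SerializeToString()
parsed = dask_pb2.DaskJob.FromString(payload)
assert parsed.workers.number_of_workers == 4
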
diff --git a/gen/pb_python/flyteidl/plugins/dask_pb2.pyi b/gen/pb_python/flyteidl/plugins/dask_pb2.pyi
index 3b7854d38..608211825 100644
--- a/gen/pb_python/flyteidl/plugins/dask_pb2.pyi
+++ b/gen/pb_python/flyteidl/plugins/dask_pb2.pyi
@@ -5,28 +5,28 @@ from typing import ClassVar as _ClassVar, Mapping as _Mapping, Optional as _Opti
 
 DESCRIPTOR: _descriptor.FileDescriptor
 
-class DaskCluster(_message.Message):
-    __slots__ = ["image", "nWorkers", "resources"]
+class DaskJob(_message.Message):
+    __slots__ = ["scheduler", "workers"]
+    SCHEDULER_FIELD_NUMBER: _ClassVar[int]
+    WORKERS_FIELD_NUMBER: _ClassVar[int]
+    scheduler: Scheduler
+    workers: WorkerGroup
+    def __init__(self, scheduler: _Optional[_Union[Scheduler, _Mapping]] = ..., workers: _Optional[_Union[WorkerGroup, _Mapping]] = ...) -> None: ...
+
+class Scheduler(_message.Message):
+    __slots__ = ["image", "resources"]
     IMAGE_FIELD_NUMBER: _ClassVar[int]
-    NWORKERS_FIELD_NUMBER: _ClassVar[int]
     RESOURCES_FIELD_NUMBER: _ClassVar[int]
     image: str
-    nWorkers: int
     resources: _tasks_pb2.Resources
-    def __init__(self, image: _Optional[str] = ..., nWorkers: _Optional[int] = ..., resources: _Optional[_Union[_tasks_pb2.Resources, _Mapping]] = ...) -> None: ...
-
-class DaskJob(_message.Message):
-    __slots__ = ["cluster", "jobPodSpec"]
-    CLUSTER_FIELD_NUMBER: _ClassVar[int]
-    JOBPODSPEC_FIELD_NUMBER: _ClassVar[int]
-    cluster: DaskCluster
-    jobPodSpec: JobPodSpec
-    def __init__(self, jobPodSpec: _Optional[_Union[JobPodSpec, _Mapping]] = ..., cluster: _Optional[_Union[DaskCluster, _Mapping]] = ...) -> None: ...
+    def __init__(self, image: _Optional[str] = ..., resources: _Optional[_Union[_tasks_pb2.Resources, _Mapping]] = ...) -> None: ...
 
-class JobPodSpec(_message.Message):
-    __slots__ = ["image", "resources"]
+class WorkerGroup(_message.Message):
+    __slots__ = ["image", "number_of_workers", "resources"]
     IMAGE_FIELD_NUMBER: _ClassVar[int]
+    NUMBER_OF_WORKERS_FIELD_NUMBER: _ClassVar[int]
     RESOURCES_FIELD_NUMBER: _ClassVar[int]
     image: str
+    number_of_workers: int
     resources: _tasks_pb2.Resources
-    def __init__(self, image: _Optional[str] = ..., resources: _Optional[_Union[_tasks_pb2.Resources, _Mapping]] = ...) -> None: ...
+    def __init__(self, number_of_workers: _Optional[int] = ..., image: _Optional[str] = ..., resources: _Optional[_Union[_tasks_pb2.Resources, _Mapping]] = ...) -> None: ...
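
Similarly, a hypothetical sketch of the best practice described in the resources comments below: set only limits and let the plugin default requests to the same values. The Resources/ResourceEntry shape used here is an assumption based on flyteidl/core/tasks.proto and should be verified against that file.

from flyteidl.core import tasks_pb2
from flyteidl.plugins import dask_pb2

# Assumed core.Resources shape: repeated ResourceEntry limits/requests, each with
# an enum `name` and a string `value` (verify against flyteidl/core/tasks.proto).
cpu_limit = tasks_pb2.Resources.ResourceEntry(name=tasks_pb2.Resources.CPU, value="1")
mem_limit = tasks_pb2.Resources.ResourceEntry(name=tasks_pb2.Resources.MEMORY, value="2Gi")

# Only limits are set; per the comment in dask.proto, the plugin is expected to
# set requests == limits when requests are omitted.
workers = dask_pb2.WorkerGroup(
    number_of_workers=8,
    image="dask-worker:example",  # placeholder image
    resources=tasks_pb2.Resources(limits=[cpu_limit, mem_limit]),
)
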
diff --git a/protos/flyteidl/plugins/dask.proto b/protos/flyteidl/plugins/dask.proto
index 5da0926b8..a29c4bdb0 100644
--- a/protos/flyteidl/plugins/dask.proto
+++ b/protos/flyteidl/plugins/dask.proto
@@ -7,30 +7,32 @@ package flyteidl.plugins;
 option go_package = "github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/plugins";
 
 
-// Custom Proto for Dask Plugin
+// Custom Proto for Dask Plugin.
 message DaskJob {
-    // Spec for the job pod
-    JobPodSpec jobPodSpec = 1;    
-    // Cluster
-    DaskCluster cluster = 2;
-}
+    // Spec for the scheduler pod.
+    Scheduler scheduler = 1;
 
+    // Spec of the default worker group.
+    WorkerGroup workers = 2;
+}
 
-// Specification for the job pod
-message JobPodSpec {
+// Specification for the scheduler pod.
+message Scheduler {
     // Optional image to use. If unset, will use the default image.
     string image = 1;
-    // Resources assigned to the job pod.
+
+    // Resources assigned to the scheduler pod.
     core.Resources resources = 2;
 }
 
-message DaskCluster {
-    // Optional image to use for the scheduler as well as the default worker group. If unset, will use 
-    // the default image.
-    string image = 1;
-    // Number of workers in the default worker group
-    int32 nWorkers = 2;
-    // Resources assigned to the scheduler as well as all pods of the default worker group.
+message WorkerGroup {
+    // Number of workers in the group.
+    uint32 number_of_workers = 1;
+
+    // Optional image to use for the pods of the worker group. If unset, will use the default image.
+    string image = 2;
+
+    // Resources assigned to all pods of the worker group.
     // As per https://kubernetes.dask.org/en/latest/kubecluster.html?highlight=limit#best-practices 
     // it is advised to only set limits. If requests are not explicitly set, the plugin will make
     // sure to set requests==limits.