diff --git a/gen/pb-cpp/flyteidl/plugins/spark.pb.cc b/gen/pb-cpp/flyteidl/plugins/spark.pb.cc index c96c7a264..60f8d21c9 100644 --- a/gen/pb-cpp/flyteidl/plugins/spark.pb.cc +++ b/gen/pb-cpp/flyteidl/plugins/spark.pb.cc @@ -18,6 +18,7 @@ extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fplugins_2fspark_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_SparkJob_HadoopConfEntry_DoNotUse_flyteidl_2fplugins_2fspark_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fplugins_2fspark_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_SparkJob_SparkConfEntry_DoNotUse_flyteidl_2fplugins_2fspark_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_google_2fprotobuf_2fstruct_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_ListValue_google_2fprotobuf_2fstruct_2eproto; namespace flyteidl { namespace plugins { class SparkApplicationDefaultTypeInternal { @@ -89,10 +90,11 @@ static void InitDefaultsSparkJob_flyteidl_2fplugins_2fspark_2eproto() { ::flyteidl::plugins::SparkJob::InitAsDefaultInstance(); } -::google::protobuf::internal::SCCInfo<2> scc_info_SparkJob_flyteidl_2fplugins_2fspark_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 2, InitDefaultsSparkJob_flyteidl_2fplugins_2fspark_2eproto}, { +::google::protobuf::internal::SCCInfo<3> scc_info_SparkJob_flyteidl_2fplugins_2fspark_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 3, InitDefaultsSparkJob_flyteidl_2fplugins_2fspark_2eproto}, { &scc_info_SparkJob_SparkConfEntry_DoNotUse_flyteidl_2fplugins_2fspark_2eproto.base, - &scc_info_SparkJob_HadoopConfEntry_DoNotUse_flyteidl_2fplugins_2fspark_2eproto.base,}}; + &scc_info_SparkJob_HadoopConfEntry_DoNotUse_flyteidl_2fplugins_2fspark_2eproto.base, + &scc_info_ListValue_google_2fprotobuf_2fstruct_2eproto.base,}}; void InitDefaults_flyteidl_2fplugins_2fspark_2eproto() { ::google::protobuf::internal::InitSCC(&scc_info_SparkApplication_flyteidl_2fplugins_2fspark_2eproto.base); @@ -140,6 +142,9 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fplugins_2fspark_2eproto: PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, sparkconf_), PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, hadoopconf_), PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, executorpath_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, databricksconf_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, databrickstoken_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, databricksinstance_), }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::plugins::SparkApplication)}, @@ -163,31 +168,36 @@ ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_fl const char descriptor_table_protodef_flyteidl_2fplugins_2fspark_2eproto[] = "\n\034flyteidl/plugins/spark.proto\022\020flyteidl" - ".plugins\"B\n\020SparkApplication\".\n\004Type\022\n\n\006" - "PYTHON\020\000\022\010\n\004JAVA\020\001\022\t\n\005SCALA\020\002\022\005\n\001R\020\003\"\365\002\n" - "\010SparkJob\022@\n\017applicationType\030\001 \001(\0162\'.fly" - "teidl.plugins.SparkApplication.Type\022\033\n\023m" - "ainApplicationFile\030\002 \001(\t\022\021\n\tmainClass\030\003 " - "\001(\t\022<\n\tsparkConf\030\004 \003(\0132).flyteidl.plugin" - "s.SparkJob.SparkConfEntry\022>\n\nhadoopConf\030" - "\005 \003(\0132*.flyteidl.plugins.SparkJob.Hadoop" - "ConfEntry\022\024\n\014executorPath\030\006 \001(\t\0320\n\016Spark" - 
"ConfEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028" - "\001\0321\n\017HadoopConfEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005val" - "ue\030\002 \001(\t:\0028\001B9Z7github.com/flyteorg/flyt" - "eidl/gen/pb-go/flyteidl/pluginsb\006proto3" + ".plugins\032\034google/protobuf/struct.proto\"B" + "\n\020SparkApplication\".\n\004Type\022\n\n\006PYTHON\020\000\022\010" + "\n\004JAVA\020\001\022\t\n\005SCALA\020\002\022\005\n\001R\020\003\"\333\003\n\010SparkJob\022" + "@\n\017applicationType\030\001 \001(\0162\'.flyteidl.plug" + "ins.SparkApplication.Type\022\033\n\023mainApplica" + "tionFile\030\002 \001(\t\022\021\n\tmainClass\030\003 \001(\t\022<\n\tspa" + "rkConf\030\004 \003(\0132).flyteidl.plugins.SparkJob" + ".SparkConfEntry\022>\n\nhadoopConf\030\005 \003(\0132*.fl" + "yteidl.plugins.SparkJob.HadoopConfEntry\022" + "\024\n\014executorPath\030\006 \001(\t\022/\n\016databricksConf\030" + "\007 \001(\0132\027.google.protobuf.Struct\022\027\n\017databr" + "icksToken\030\010 \001(\t\022\032\n\022databricksInstance\030\t " + "\001(\t\0320\n\016SparkConfEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005va" + "lue\030\002 \001(\t:\0028\001\0321\n\017HadoopConfEntry\022\013\n\003key\030" + "\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B9Z7github.com/f" + "lyteorg/flyteidl/gen/pb-go/flyteidl/plug" + "insb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fplugins_2fspark_2eproto = { false, InitDefaults_flyteidl_2fplugins_2fspark_2eproto, descriptor_table_protodef_flyteidl_2fplugins_2fspark_2eproto, - "flyteidl/plugins/spark.proto", &assign_descriptors_table_flyteidl_2fplugins_2fspark_2eproto, 559, + "flyteidl/plugins/spark.proto", &assign_descriptors_table_flyteidl_2fplugins_2fspark_2eproto, 691, }; void AddDescriptors_flyteidl_2fplugins_2fspark_2eproto() { static constexpr ::google::protobuf::internal::InitFunc deps[1] = { + ::AddDescriptors_google_2fprotobuf_2fstruct_2eproto, }; - ::google::protobuf::internal::AddDescriptors(&descriptor_table_flyteidl_2fplugins_2fspark_2eproto, deps, 0); + ::google::protobuf::internal::AddDescriptors(&descriptor_table_flyteidl_2fplugins_2fspark_2eproto, deps, 1); } // Force running AddDescriptors() at dynamic initialization time. 
@@ -514,11 +524,24 @@ bool SparkJob_HadoopConfEntry_DoNotUse::_ParseMap(const char* begin, const char* // =================================================================== void SparkJob::InitAsDefaultInstance() { + ::flyteidl::plugins::_SparkJob_default_instance_._instance.get_mutable()->databricksconf_ = const_cast< ::google::protobuf::Struct*>( + ::google::protobuf::Struct::internal_default_instance()); } class SparkJob::HasBitSetters { public: + static const ::google::protobuf::Struct& databricksconf(const SparkJob* msg); }; +const ::google::protobuf::Struct& +SparkJob::HasBitSetters::databricksconf(const SparkJob* msg) { + return *msg->databricksconf_; +} +void SparkJob::clear_databricksconf() { + if (GetArenaNoVirtual() == nullptr && databricksconf_ != nullptr) { + delete databricksconf_; + } + databricksconf_ = nullptr; +} #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int SparkJob::kApplicationTypeFieldNumber; const int SparkJob::kMainApplicationFileFieldNumber; @@ -526,6 +549,9 @@ const int SparkJob::kMainClassFieldNumber; const int SparkJob::kSparkConfFieldNumber; const int SparkJob::kHadoopConfFieldNumber; const int SparkJob::kExecutorPathFieldNumber; +const int SparkJob::kDatabricksConfFieldNumber; +const int SparkJob::kDatabricksTokenFieldNumber; +const int SparkJob::kDatabricksInstanceFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 SparkJob::SparkJob() @@ -551,6 +577,19 @@ SparkJob::SparkJob(const SparkJob& from) if (from.executorpath().size() > 0) { executorpath_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.executorpath_); } + databrickstoken_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.databrickstoken().size() > 0) { + databrickstoken_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.databrickstoken_); + } + databricksinstance_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.databricksinstance().size() > 0) { + databricksinstance_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.databricksinstance_); + } + if (from.has_databricksconf()) { + databricksconf_ = new ::google::protobuf::Struct(*from.databricksconf_); + } else { + databricksconf_ = nullptr; + } applicationtype_ = from.applicationtype_; // @@protoc_insertion_point(copy_constructor:flyteidl.plugins.SparkJob) } @@ -561,7 +600,11 @@ void SparkJob::SharedCtor() { mainapplicationfile_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); mainclass_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); executorpath_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - applicationtype_ = 0; + databrickstoken_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + databricksinstance_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + ::memset(&databricksconf_, 0, static_cast( + reinterpret_cast(&applicationtype_) - + reinterpret_cast(&databricksconf_)) + sizeof(applicationtype_)); } SparkJob::~SparkJob() { @@ -573,6 +616,9 @@ void SparkJob::SharedDtor() { mainapplicationfile_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); mainclass_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); executorpath_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + 
databrickstoken_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + databricksinstance_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (this != internal_default_instance()) delete databricksconf_; } void SparkJob::SetCachedSize(int size) const { @@ -595,6 +641,12 @@ void SparkJob::Clear() { mainapplicationfile_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); mainclass_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); executorpath_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + databrickstoken_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + databricksinstance_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (GetArenaNoVirtual() == nullptr && databricksconf_ != nullptr) { + delete databricksconf_; + } + databricksconf_ = nullptr; applicationtype_ = 0; _internal_metadata_.Clear(); } @@ -706,6 +758,51 @@ const char* SparkJob::_InternalParse(const char* begin, const char* end, void* o ptr += size; break; } + // .google.protobuf.Struct databricksConf = 7; + case 7: { + if (static_cast<::google::protobuf::uint8>(tag) != 58) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::google::protobuf::Struct::_InternalParse; + object = msg->mutable_databricksconf(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, ptr)); + break; + } + // string databricksToken = 8; + case 8: { + if (static_cast<::google::protobuf::uint8>(tag) != 66) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.plugins.SparkJob.databricksToken"); + object = msg->mutable_databrickstoken(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } + // string databricksInstance = 9; + case 9: { + if (static_cast<::google::protobuf::uint8>(tag) != 74) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.plugins.SparkJob.databricksInstance"); + object = msg->mutable_databricksinstance(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -851,6 +948,47 @@ bool SparkJob::MergePartialFromCodedStream( break; } + // .google.protobuf.Struct databricksConf = 7; + case 7: { + if (static_cast< ::google::protobuf::uint8>(tag) == (58 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, mutable_databricksconf())); + } else { + goto 
handle_unusual; + } + break; + } + + // string databricksToken = 8; + case 8: { + if (static_cast< ::google::protobuf::uint8>(tag) == (66 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_databrickstoken())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->databrickstoken().data(), static_cast(this->databrickstoken().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.plugins.SparkJob.databricksToken")); + } else { + goto handle_unusual; + } + break; + } + + // string databricksInstance = 9; + case 9: { + if (static_cast< ::google::protobuf::uint8>(tag) == (74 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_databricksinstance())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->databricksinstance().data(), static_cast(this->databricksinstance().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.plugins.SparkJob.databricksInstance")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -1012,6 +1150,32 @@ void SparkJob::SerializeWithCachedSizes( 6, this->executorpath(), output); } + // .google.protobuf.Struct databricksConf = 7; + if (this->has_databricksconf()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 7, HasBitSetters::databricksconf(this), output); + } + + // string databricksToken = 8; + if (this->databrickstoken().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->databrickstoken().data(), static_cast(this->databrickstoken().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.plugins.SparkJob.databricksToken"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 8, this->databrickstoken(), output); + } + + // string databricksInstance = 9; + if (this->databricksinstance().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->databricksinstance().data(), static_cast(this->databricksinstance().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.plugins.SparkJob.databricksInstance"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 9, this->databricksinstance(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -1162,6 +1326,35 @@ ::google::protobuf::uint8* SparkJob::InternalSerializeWithCachedSizesToArray( 6, this->executorpath(), target); } + // .google.protobuf.Struct databricksConf = 7; + if (this->has_databricksconf()) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 7, HasBitSetters::databricksconf(this), target); + } + + // string databricksToken = 8; + if (this->databrickstoken().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->databrickstoken().data(), static_cast(this->databrickstoken().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.plugins.SparkJob.databricksToken"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 8, this->databrickstoken(), target); + } + + // string databricksInstance = 9; + if (this->databricksinstance().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + 
this->databricksinstance().data(), static_cast(this->databricksinstance().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.plugins.SparkJob.databricksInstance"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 9, this->databricksinstance(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -1232,6 +1425,27 @@ size_t SparkJob::ByteSizeLong() const { this->executorpath()); } + // string databricksToken = 8; + if (this->databrickstoken().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->databrickstoken()); + } + + // string databricksInstance = 9; + if (this->databricksinstance().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->databricksinstance()); + } + + // .google.protobuf.Struct databricksConf = 7; + if (this->has_databricksconf()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *databricksconf_); + } + // .flyteidl.plugins.SparkApplication.Type applicationType = 1; if (this->applicationtype() != 0) { total_size += 1 + @@ -1279,6 +1493,17 @@ void SparkJob::MergeFrom(const SparkJob& from) { executorpath_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.executorpath_); } + if (from.databrickstoken().size() > 0) { + + databrickstoken_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.databrickstoken_); + } + if (from.databricksinstance().size() > 0) { + + databricksinstance_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.databricksinstance_); + } + if (from.has_databricksconf()) { + mutable_databricksconf()->::google::protobuf::Struct::MergeFrom(from.databricksconf()); + } if (from.applicationtype() != 0) { set_applicationtype(from.applicationtype()); } @@ -1317,6 +1542,11 @@ void SparkJob::InternalSwap(SparkJob* other) { GetArenaNoVirtual()); executorpath_.Swap(&other->executorpath_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + databrickstoken_.Swap(&other->databrickstoken_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + databricksinstance_.Swap(&other->databricksinstance_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + swap(databricksconf_, other->databricksconf_); swap(applicationtype_, other->applicationtype_); } diff --git a/gen/pb-cpp/flyteidl/plugins/spark.pb.h b/gen/pb-cpp/flyteidl/plugins/spark.pb.h index 6fa11bdb0..4b60371d8 100644 --- a/gen/pb-cpp/flyteidl/plugins/spark.pb.h +++ b/gen/pb-cpp/flyteidl/plugins/spark.pb.h @@ -35,6 +35,7 @@ #include #include #include +#include // @@protoc_insertion_point(includes) #include #define PROTOBUF_INTERNAL_EXPORT_flyteidl_2fplugins_2fspark_2eproto @@ -443,6 +444,43 @@ class SparkJob final : ::std::string* release_executorpath(); void set_allocated_executorpath(::std::string* executorpath); + // string databricksToken = 8; + void clear_databrickstoken(); + static const int kDatabricksTokenFieldNumber = 8; + const ::std::string& databrickstoken() const; + void set_databrickstoken(const ::std::string& value); + #if LANG_CXX11 + void set_databrickstoken(::std::string&& value); + #endif + void set_databrickstoken(const char* value); + void set_databrickstoken(const char* 
value, size_t size); + ::std::string* mutable_databrickstoken(); + ::std::string* release_databrickstoken(); + void set_allocated_databrickstoken(::std::string* databrickstoken); + + // string databricksInstance = 9; + void clear_databricksinstance(); + static const int kDatabricksInstanceFieldNumber = 9; + const ::std::string& databricksinstance() const; + void set_databricksinstance(const ::std::string& value); + #if LANG_CXX11 + void set_databricksinstance(::std::string&& value); + #endif + void set_databricksinstance(const char* value); + void set_databricksinstance(const char* value, size_t size); + ::std::string* mutable_databricksinstance(); + ::std::string* release_databricksinstance(); + void set_allocated_databricksinstance(::std::string* databricksinstance); + + // .google.protobuf.Struct databricksConf = 7; + bool has_databricksconf() const; + void clear_databricksconf(); + static const int kDatabricksConfFieldNumber = 7; + const ::google::protobuf::Struct& databricksconf() const; + ::google::protobuf::Struct* release_databricksconf(); + ::google::protobuf::Struct* mutable_databricksconf(); + void set_allocated_databricksconf(::google::protobuf::Struct* databricksconf); + // .flyteidl.plugins.SparkApplication.Type applicationType = 1; void clear_applicationtype(); static const int kApplicationTypeFieldNumber = 1; @@ -469,6 +507,9 @@ class SparkJob final : ::google::protobuf::internal::ArenaStringPtr mainapplicationfile_; ::google::protobuf::internal::ArenaStringPtr mainclass_; ::google::protobuf::internal::ArenaStringPtr executorpath_; + ::google::protobuf::internal::ArenaStringPtr databrickstoken_; + ::google::protobuf::internal::ArenaStringPtr databricksinstance_; + ::google::protobuf::Struct* databricksconf_; int applicationtype_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fplugins_2fspark_2eproto; @@ -701,6 +742,158 @@ inline void SparkJob::set_allocated_executorpath(::std::string* executorpath) { // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.executorPath) } +// .google.protobuf.Struct databricksConf = 7; +inline bool SparkJob::has_databricksconf() const { + return this != internal_default_instance() && databricksconf_ != nullptr; +} +inline const ::google::protobuf::Struct& SparkJob::databricksconf() const { + const ::google::protobuf::Struct* p = databricksconf_; + // @@protoc_insertion_point(field_get:flyteidl.plugins.SparkJob.databricksConf) + return p != nullptr ? 
*p : *reinterpret_cast( + &::google::protobuf::_Struct_default_instance_); +} +inline ::google::protobuf::Struct* SparkJob::release_databricksconf() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.SparkJob.databricksConf) + + ::google::protobuf::Struct* temp = databricksconf_; + databricksconf_ = nullptr; + return temp; +} +inline ::google::protobuf::Struct* SparkJob::mutable_databricksconf() { + + if (databricksconf_ == nullptr) { + auto* p = CreateMaybeMessage<::google::protobuf::Struct>(GetArenaNoVirtual()); + databricksconf_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.SparkJob.databricksConf) + return databricksconf_; +} +inline void SparkJob::set_allocated_databricksconf(::google::protobuf::Struct* databricksconf) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete reinterpret_cast< ::google::protobuf::MessageLite*>(databricksconf_); + } + if (databricksconf) { + ::google::protobuf::Arena* submessage_arena = + reinterpret_cast<::google::protobuf::MessageLite*>(databricksconf)->GetArena(); + if (message_arena != submessage_arena) { + databricksconf = ::google::protobuf::internal::GetOwnedMessage( + message_arena, databricksconf, submessage_arena); + } + + } else { + + } + databricksconf_ = databricksconf; + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.databricksConf) +} + +// string databricksToken = 8; +inline void SparkJob::clear_databrickstoken() { + databrickstoken_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& SparkJob::databrickstoken() const { + // @@protoc_insertion_point(field_get:flyteidl.plugins.SparkJob.databricksToken) + return databrickstoken_.GetNoArena(); +} +inline void SparkJob::set_databrickstoken(const ::std::string& value) { + + databrickstoken_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.plugins.SparkJob.databricksToken) +} +#if LANG_CXX11 +inline void SparkJob::set_databrickstoken(::std::string&& value) { + + databrickstoken_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.plugins.SparkJob.databricksToken) +} +#endif +inline void SparkJob::set_databrickstoken(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + databrickstoken_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.plugins.SparkJob.databricksToken) +} +inline void SparkJob::set_databrickstoken(const char* value, size_t size) { + + databrickstoken_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.plugins.SparkJob.databricksToken) +} +inline ::std::string* SparkJob::mutable_databrickstoken() { + + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.SparkJob.databricksToken) + return databrickstoken_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* SparkJob::release_databrickstoken() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.SparkJob.databricksToken) + + return databrickstoken_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void 
SparkJob::set_allocated_databrickstoken(::std::string* databrickstoken) { + if (databrickstoken != nullptr) { + + } else { + + } + databrickstoken_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), databrickstoken); + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.databricksToken) +} + +// string databricksInstance = 9; +inline void SparkJob::clear_databricksinstance() { + databricksinstance_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& SparkJob::databricksinstance() const { + // @@protoc_insertion_point(field_get:flyteidl.plugins.SparkJob.databricksInstance) + return databricksinstance_.GetNoArena(); +} +inline void SparkJob::set_databricksinstance(const ::std::string& value) { + + databricksinstance_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.plugins.SparkJob.databricksInstance) +} +#if LANG_CXX11 +inline void SparkJob::set_databricksinstance(::std::string&& value) { + + databricksinstance_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.plugins.SparkJob.databricksInstance) +} +#endif +inline void SparkJob::set_databricksinstance(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + databricksinstance_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.plugins.SparkJob.databricksInstance) +} +inline void SparkJob::set_databricksinstance(const char* value, size_t size) { + + databricksinstance_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.plugins.SparkJob.databricksInstance) +} +inline ::std::string* SparkJob::mutable_databricksinstance() { + + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.SparkJob.databricksInstance) + return databricksinstance_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* SparkJob::release_databricksinstance() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.SparkJob.databricksInstance) + + return databricksinstance_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void SparkJob::set_allocated_databricksinstance(::std::string* databricksinstance) { + if (databricksinstance != nullptr) { + + } else { + + } + databricksinstance_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), databricksinstance); + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.databricksInstance) +} + #ifdef __GNUC__ #pragma GCC diagnostic pop #endif // __GNUC__ diff --git a/gen/pb-go/flyteidl/plugins/spark.pb.go b/gen/pb-go/flyteidl/plugins/spark.pb.go index 4eb696e83..768bd46ba 100644 --- a/gen/pb-go/flyteidl/plugins/spark.pb.go +++ b/gen/pb-go/flyteidl/plugins/spark.pb.go @@ -6,6 +6,7 @@ package plugins import ( fmt "fmt" proto "github.com/golang/protobuf/proto" + _struct "github.com/golang/protobuf/ptypes/struct" math "math" ) @@ -84,15 +85,24 @@ var xxx_messageInfo_SparkApplication proto.InternalMessageInfo // Custom Proto for Spark Plugin. 
 type SparkJob struct {
-	ApplicationType     SparkApplication_Type `protobuf:"varint,1,opt,name=applicationType,proto3,enum=flyteidl.plugins.SparkApplication_Type" json:"applicationType,omitempty"`
-	MainApplicationFile string                `protobuf:"bytes,2,opt,name=mainApplicationFile,proto3" json:"mainApplicationFile,omitempty"`
-	MainClass           string                `protobuf:"bytes,3,opt,name=mainClass,proto3" json:"mainClass,omitempty"`
-	SparkConf           map[string]string     `protobuf:"bytes,4,rep,name=sparkConf,proto3" json:"sparkConf,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
-	HadoopConf          map[string]string     `protobuf:"bytes,5,rep,name=hadoopConf,proto3" json:"hadoopConf,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
-	ExecutorPath        string                `protobuf:"bytes,6,opt,name=executorPath,proto3" json:"executorPath,omitempty"`
-	XXX_NoUnkeyedLiteral struct{}             `json:"-"`
-	XXX_unrecognized     []byte               `json:"-"`
-	XXX_sizecache        int32                `json:"-"`
+	ApplicationType     SparkApplication_Type `protobuf:"varint,1,opt,name=applicationType,proto3,enum=flyteidl.plugins.SparkApplication_Type" json:"applicationType,omitempty"`
+	MainApplicationFile string                `protobuf:"bytes,2,opt,name=mainApplicationFile,proto3" json:"mainApplicationFile,omitempty"`
+	MainClass           string                `protobuf:"bytes,3,opt,name=mainClass,proto3" json:"mainClass,omitempty"`
+	SparkConf           map[string]string     `protobuf:"bytes,4,rep,name=sparkConf,proto3" json:"sparkConf,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
+	HadoopConf          map[string]string     `protobuf:"bytes,5,rep,name=hadoopConf,proto3" json:"hadoopConf,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
+	ExecutorPath        string                `protobuf:"bytes,6,opt,name=executorPath,proto3" json:"executorPath,omitempty"`
+	// Databricks job configuration.
+	// Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+	DatabricksConf *_struct.Struct `protobuf:"bytes,7,opt,name=databricksConf,proto3" json:"databricksConf,omitempty"`
+	// Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html
+	// This token can be set in either flytepropeller or flytekit.
+	DatabricksToken string `protobuf:"bytes,8,opt,name=databricksToken,proto3" json:"databricksToken,omitempty"`
+	// Domain name of your deployment. Use the form <account>.cloud.databricks.com.
+	// This instance name can be set in either flytepropeller or flytekit.
+ DatabricksInstance string `protobuf:"bytes,9,opt,name=databricksInstance,proto3" json:"databricksInstance,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *SparkJob) Reset() { *m = SparkJob{} } @@ -162,6 +172,27 @@ func (m *SparkJob) GetExecutorPath() string { return "" } +func (m *SparkJob) GetDatabricksConf() *_struct.Struct { + if m != nil { + return m.DatabricksConf + } + return nil +} + +func (m *SparkJob) GetDatabricksToken() string { + if m != nil { + return m.DatabricksToken + } + return "" +} + +func (m *SparkJob) GetDatabricksInstance() string { + if m != nil { + return m.DatabricksInstance + } + return "" +} + func init() { proto.RegisterEnum("flyteidl.plugins.SparkApplication_Type", SparkApplication_Type_name, SparkApplication_Type_value) proto.RegisterType((*SparkApplication)(nil), "flyteidl.plugins.SparkApplication") @@ -173,28 +204,33 @@ func init() { func init() { proto.RegisterFile("flyteidl/plugins/spark.proto", fileDescriptor_ca8a069b9820144a) } var fileDescriptor_ca8a069b9820144a = []byte{ - // 367 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x92, 0x4f, 0x4f, 0xea, 0x40, - 0x14, 0xc5, 0x5f, 0x29, 0x25, 0xf4, 0xbe, 0x17, 0x68, 0xe6, 0xb9, 0x68, 0x08, 0x0b, 0xd2, 0x8d, - 0x68, 0x62, 0x6b, 0x70, 0xe1, 0x9f, 0xe8, 0xa2, 0x10, 0x95, 0x34, 0x46, 0xb1, 0x10, 0x13, 0xdd, - 0x4d, 0xa1, 0x94, 0x86, 0xa1, 0x33, 0x69, 0xa7, 0xc6, 0x7e, 0x5e, 0xbf, 0x88, 0xe9, 0x20, 0xff, - 0x1a, 0x35, 0x71, 0x37, 0xbd, 0xf7, 0x9c, 0xdf, 0x3d, 0xbd, 0xb9, 0xd0, 0x9c, 0x92, 0x8c, 0xfb, - 0xe1, 0x84, 0x58, 0x8c, 0xa4, 0x41, 0x18, 0x25, 0x56, 0xc2, 0x70, 0x3c, 0x37, 0x59, 0x4c, 0x39, - 0x45, 0xda, 0xaa, 0x6b, 0x7e, 0x76, 0x8d, 0x2e, 0x68, 0xc3, 0x5c, 0x60, 0x33, 0x46, 0xc2, 0x31, - 0xe6, 0x21, 0x8d, 0x0c, 0x13, 0xca, 0xa3, 0x8c, 0xf9, 0x08, 0xa0, 0x32, 0x78, 0x1e, 0xf5, 0x1f, - 0xee, 0xb5, 0x3f, 0xa8, 0x0a, 0x65, 0xc7, 0x7e, 0xb2, 0x35, 0x09, 0xa9, 0xa0, 0x0c, 0x7b, 0xf6, - 0x9d, 0xad, 0x95, 0x90, 0x02, 0x92, 0xab, 0xc9, 0xc6, 0xbb, 0x0c, 0x55, 0x01, 0x71, 0xa8, 0x87, - 0x1e, 0xa1, 0x8e, 0x37, 0xac, 0x9c, 0xa3, 0x4b, 0x2d, 0xa9, 0x5d, 0xeb, 0xec, 0x9b, 0xc5, 0xe1, - 0x66, 0x71, 0xb2, 0x99, 0xcb, 0xdd, 0xa2, 0x1f, 0x1d, 0xc3, 0xff, 0x05, 0x0e, 0xa3, 0x2d, 0xe1, - 0x4d, 0x48, 0x7c, 0xbd, 0xd4, 0x92, 0xda, 0xaa, 0xfb, 0x55, 0x0b, 0x35, 0x41, 0xcd, 0xcb, 0x3d, - 0x82, 0x93, 0x44, 0x97, 0x85, 0x6e, 0x53, 0x40, 0xb7, 0xa0, 0x8a, 0xa5, 0xf4, 0x68, 0x34, 0xd5, - 0xcb, 0x2d, 0xb9, 0xfd, 0xb7, 0x73, 0xf0, 0x4d, 0x38, 0x87, 0x7a, 0xcb, 0x47, 0xae, 0xbd, 0x8e, - 0x78, 0x9c, 0xb9, 0x1b, 0x2f, 0x72, 0x00, 0x66, 0x78, 0x42, 0x29, 0x13, 0x24, 0x45, 0x90, 0x0e, - 0x7f, 0x20, 0xf5, 0xd7, 0xe2, 0x25, 0x6a, 0xcb, 0x8d, 0x0c, 0xf8, 0xe7, 0xbf, 0xf9, 0xe3, 0x94, - 0xd3, 0x78, 0x80, 0xf9, 0x4c, 0xaf, 0x88, 0xd4, 0x3b, 0xb5, 0xc6, 0x25, 0xd4, 0x76, 0xc3, 0x20, - 0x0d, 0xe4, 0xb9, 0x9f, 0x89, 0x0d, 0xab, 0x6e, 0xfe, 0x44, 0x7b, 0xa0, 0xbc, 0x62, 0x92, 0xae, - 0xd6, 0xb3, 0xfc, 0xb8, 0x28, 0x9d, 0x49, 0x8d, 0x2b, 0xa8, 0x17, 0x02, 0xfc, 0xc6, 0xde, 0x3d, - 0x7f, 0x39, 0x0d, 0x42, 0x3e, 0x4b, 0x3d, 0x73, 0x4c, 0x17, 0x96, 0xf8, 0x49, 0x1a, 0x07, 0xd6, - 0xfa, 0xde, 0x02, 0x3f, 0xb2, 0x98, 0x77, 0x14, 0x50, 0xab, 0x78, 0x82, 0x5e, 0x45, 0x5c, 0xdf, - 0xc9, 0x47, 0x00, 0x00, 0x00, 0xff, 0xff, 0x94, 0x6f, 0xaa, 0x2d, 0x9d, 0x02, 0x00, 0x00, + // 442 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0x5d, 0x8b, 0xd3, 0x40, 
+ 0x14, 0x35, 0xfd, 0xb2, 0xb9, 0x2b, 0x6d, 0xb8, 0x0a, 0x86, 0xd2, 0x87, 0xd2, 0x17, 0xa3, 0xe0, + 0x44, 0xea, 0x83, 0x1f, 0x28, 0x92, 0x2d, 0xea, 0x5a, 0x44, 0xd7, 0xb4, 0x08, 0xfa, 0x36, 0x49, + 0xa7, 0x69, 0xe8, 0xec, 0xcc, 0x90, 0x4c, 0xc4, 0xfc, 0x79, 0x91, 0x4c, 0xec, 0x66, 0x37, 0xac, + 0x82, 0x6f, 0xc9, 0xb9, 0xe7, 0x9c, 0x7b, 0x39, 0x87, 0x81, 0xe9, 0x8e, 0x97, 0x9a, 0xa5, 0x5b, + 0xee, 0x2b, 0x5e, 0x24, 0xa9, 0xc8, 0xfd, 0x5c, 0xd1, 0xec, 0x40, 0x54, 0x26, 0xb5, 0x44, 0xe7, + 0x38, 0x25, 0x7f, 0xa6, 0x93, 0x69, 0x22, 0x65, 0xc2, 0x99, 0x6f, 0xe6, 0x51, 0xb1, 0xf3, 0x73, + 0x9d, 0x15, 0xb1, 0xae, 0xf9, 0xf3, 0x53, 0x70, 0xd6, 0x95, 0x3c, 0x50, 0x8a, 0xa7, 0x31, 0xd5, + 0xa9, 0x14, 0x73, 0x02, 0xbd, 0x4d, 0xa9, 0x18, 0x02, 0x0c, 0xce, 0xbf, 0x6d, 0xce, 0x3e, 0x7f, + 0x72, 0x6e, 0xe1, 0x10, 0x7a, 0xab, 0xe0, 0x6b, 0xe0, 0x58, 0x68, 0x43, 0x7f, 0xbd, 0x0c, 0x3e, + 0x06, 0x4e, 0x07, 0xfb, 0x60, 0x85, 0x4e, 0x77, 0xfe, 0xab, 0x07, 0x43, 0x63, 0xb2, 0x92, 0x11, + 0x7e, 0x81, 0x31, 0x6d, 0xbc, 0x2a, 0x1f, 0xd7, 0x9a, 0x59, 0xde, 0x68, 0xf1, 0x80, 0xb4, 0x4f, + 0x23, 0xed, 0xcd, 0xa4, 0xa2, 0x87, 0x6d, 0x3d, 0x3e, 0x81, 0xbb, 0x17, 0x34, 0x15, 0x57, 0x88, + 0xef, 0x52, 0xce, 0xdc, 0xce, 0xcc, 0xf2, 0xec, 0xf0, 0xa6, 0x11, 0x4e, 0xc1, 0xae, 0xe0, 0x25, + 0xa7, 0x79, 0xee, 0x76, 0x0d, 0xaf, 0x01, 0xf0, 0x3d, 0xd8, 0x26, 0xb2, 0xa5, 0x14, 0x3b, 0xb7, + 0x37, 0xeb, 0x7a, 0x27, 0x8b, 0x87, 0x7f, 0x39, 0x6e, 0x25, 0xa3, 0xfa, 0xa3, 0xe2, 0xbe, 0x15, + 0x3a, 0x2b, 0xc3, 0x46, 0x8b, 0x2b, 0x80, 0x3d, 0xdd, 0x4a, 0xa9, 0x8c, 0x53, 0xdf, 0x38, 0x3d, + 0xfa, 0x87, 0xd3, 0xd9, 0x25, 0xb9, 0xb6, 0xba, 0xa2, 0xc6, 0x39, 0xdc, 0x61, 0x3f, 0x59, 0x5c, + 0x68, 0x99, 0x9d, 0x53, 0xbd, 0x77, 0x07, 0xe6, 0xea, 0x6b, 0x18, 0xbe, 0x81, 0xd1, 0x96, 0x6a, + 0x1a, 0x65, 0x69, 0x7c, 0xc8, 0xcd, 0xce, 0xdb, 0x33, 0xcb, 0x3b, 0x59, 0xdc, 0x27, 0x75, 0xc7, + 0xe4, 0xd8, 0x31, 0x59, 0x9b, 0x8e, 0xc3, 0x16, 0x1d, 0x3d, 0x18, 0x37, 0xc8, 0x46, 0x1e, 0x98, + 0x70, 0x87, 0x66, 0x4f, 0x1b, 0x46, 0x02, 0xd8, 0x40, 0x1f, 0x44, 0xae, 0xa9, 0x88, 0x99, 0x6b, + 0x1b, 0xf2, 0x0d, 0x93, 0xc9, 0x2b, 0x18, 0x5d, 0xcf, 0x09, 0x1d, 0xe8, 0x1e, 0x58, 0x69, 0xca, + 0xb7, 0xc3, 0xea, 0x13, 0xef, 0x41, 0xff, 0x07, 0xe5, 0xc5, 0xb1, 0xb9, 0xfa, 0xe7, 0x65, 0xe7, + 0xb9, 0x35, 0x79, 0x0d, 0xe3, 0x56, 0x36, 0xff, 0x23, 0x3f, 0x7d, 0xf1, 0xfd, 0x59, 0x92, 0xea, + 0x7d, 0x11, 0x91, 0x58, 0x5e, 0xf8, 0x26, 0x7f, 0x99, 0x25, 0xfe, 0xe5, 0x43, 0x49, 0x98, 0xf0, + 0x55, 0xf4, 0x38, 0x91, 0x7e, 0xfb, 0xed, 0x44, 0x03, 0x13, 0xd9, 0xd3, 0xdf, 0x01, 0x00, 0x00, + 0xff, 0xff, 0x02, 0x51, 0x7a, 0xdc, 0x56, 0x03, 0x00, 0x00, } diff --git a/gen/pb-go/flyteidl/plugins/spark.pb.validate.go b/gen/pb-go/flyteidl/plugins/spark.pb.validate.go index 66f27a98c..0577090e2 100644 --- a/gen/pb-go/flyteidl/plugins/spark.pb.validate.go +++ b/gen/pb-go/flyteidl/plugins/spark.pb.validate.go @@ -120,6 +120,20 @@ func (m *SparkJob) Validate() error { // no validation rules for ExecutorPath + if v, ok := interface{}(m.GetDatabricksConf()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return SparkJobValidationError{ + field: "DatabricksConf", + reason: "embedded message failed validation", + cause: err, + } + } + } + + // no validation rules for DatabricksToken + + // no validation rules for DatabricksInstance + return nil } diff --git a/gen/pb-java/flyteidl/plugins/Spark.java b/gen/pb-java/flyteidl/plugins/Spark.java index 20796e690..57bfefee8 100644 --- a/gen/pb-java/flyteidl/plugins/Spark.java +++ 
b/gen/pb-java/flyteidl/plugins/Spark.java
@@ -659,6 +659,74 @@ java.lang.String getHadoopConfOrThrow(
      */
     com.google.protobuf.ByteString
         getExecutorPathBytes();
+
+    /**
+     * <pre>
+     * Databricks job configuration.
+     * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+     * </pre>
+     *
+     * <code>.google.protobuf.Struct databricksConf = 7;</code>
+     */
+    boolean hasDatabricksConf();
+    /**
+     * <pre>
+     * Databricks job configuration.
+     * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+     * </pre>
+     *
+     * <code>.google.protobuf.Struct databricksConf = 7;</code>
+     */
+    com.google.protobuf.Struct getDatabricksConf();
+    /**
+     * <pre>
+     * Databricks job configuration.
+     * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+     * </pre>
+     *
+     * <code>.google.protobuf.Struct databricksConf = 7;</code>
+     */
+    com.google.protobuf.StructOrBuilder getDatabricksConfOrBuilder();
+
+    /**
+     * <pre>
+     * Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html
+     * This token can be set in either flytepropeller or flytekit.
+     * </pre>
+     *
+     * <code>string databricksToken = 8;</code>
+     */
+    java.lang.String getDatabricksToken();
+    /**
+     * <pre>
+     * Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html
+     * This token can be set in either flytepropeller or flytekit.
+     * </pre>
+     *
+     * <code>string databricksToken = 8;</code>
+     */
+    com.google.protobuf.ByteString
+        getDatabricksTokenBytes();
+
+    /**
+     * <pre>
+     * Domain name of your deployment. Use the form <account>.cloud.databricks.com.
+     * This instance name can be set in either flytepropeller or flytekit.
+     * </pre>
+     *
+     * <code>string databricksInstance = 9;</code>
+     */
+    java.lang.String getDatabricksInstance();
+    /**
+     * <pre>
+     * Domain name of your deployment. Use the form <account>.cloud.databricks.com.
+     * This instance name can be set in either flytepropeller or flytekit.
+     * </pre>
+     *
+     * <code>string databricksInstance = 9;</code>
+     */
+    com.google.protobuf.ByteString
+        getDatabricksInstanceBytes();
   }
   /**
    * <pre>
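The three interface accessors above mirror the new proto fields: a nullable google.protobuf.Struct for the free-form job settings, plus two plain strings. For orientation, a minimal Go usage sketch (not part of this diff) populating the same fields through the pb-go bindings generated below; the configuration keys and the token/instance values are hypothetical placeholders.

```go
package main

import (
	"fmt"

	structpb "github.com/golang/protobuf/ptypes/struct"

	plugins "github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/plugins"
)

func main() {
	// databricksConf is a free-form google.protobuf.Struct, so arbitrary
	// Databricks job settings can travel through it without schema changes.
	conf := &structpb.Struct{
		Fields: map[string]*structpb.Value{
			"run_name":        {Kind: &structpb.Value_StringValue{StringValue: "flyte-run"}},
			"timeout_seconds": {Kind: &structpb.Value_NumberValue{NumberValue: 3600}},
		},
	}
	job := &plugins.SparkJob{
		ApplicationType:    plugins.SparkApplication_PYTHON,
		DatabricksConf:     conf,
		DatabricksToken:    "dapi-placeholder-token",
		DatabricksInstance: "myaccount.cloud.databricks.com",
	}
	fmt.Println(job.GetDatabricksInstance(), job.GetDatabricksConf() != nil)
}
```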
@@ -681,6 +749,8 @@ private SparkJob() {
       mainApplicationFile_ = "";
       mainClass_ = "";
       executorPath_ = "";
+      databricksToken_ = "";
+      databricksInstance_ = "";
     }
 
     @java.lang.Override
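The constructor hunk above only seeds the two new strings; databricksConf_ stays null. That is standard proto3 presence: scalar strings have no has-bit (unset and "" are indistinguishable), while message-typed fields keep explicit presence through nullability. A small Go sketch of the same semantics, assuming the pb-go types from this diff:

```go
package main

import (
	"fmt"

	plugins "github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/plugins"
)

func main() {
	job := &plugins.SparkJob{}
	// Proto3 strings carry no presence bit: an unset field and "" are the
	// same value, and neither is written to the wire.
	fmt.Println(job.GetDatabricksToken() == "") // true
	// Message fields are pointers, so presence is observable: nil means
	// "never set", which is what hasDatabricksConf() reports in Java/C++.
	fmt.Println(job.GetDatabricksConf() == nil) // true until assigned
}
```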
@@ -757,6 +827,31 @@ private SparkJob(
               executorPath_ = s;
               break;
             }
+            case 58: {
+              com.google.protobuf.Struct.Builder subBuilder = null;
+              if (databricksConf_ != null) {
+                subBuilder = databricksConf_.toBuilder();
+              }
+              databricksConf_ = input.readMessage(com.google.protobuf.Struct.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(databricksConf_);
+                databricksConf_ = subBuilder.buildPartial();
+              }
+
+              break;
+            }
+            case 66: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              databricksToken_ = s;
+              break;
+            }
+            case 74: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              databricksInstance_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
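The new case labels in this parse loop come straight from the protobuf wire format: a tag is (field_number << 3) | wire_type, and all three new fields are length-delimited (wire type 2, used for strings and embedded messages), which yields 58, 66 and 74 for fields 7, 8 and 9. A quick Go check of the arithmetic:

```go
package main

import "fmt"

func main() {
	const lengthDelimited = 2 // wire type for strings and embedded messages
	for _, field := range []uint32{7, 8, 9} {
		// tag = (field_number << 3) | wire_type
		fmt.Printf("field %d -> tag %d\n", field, field<<3|lengthDelimited)
	}
	// Prints 58, 66 and 74, matching case 58/66/74 above.
}
```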
@@ -1083,6 +1178,130 @@ public java.lang.String getExecutorPath() {
       }
     }
 
+    public static final int DATABRICKSCONF_FIELD_NUMBER = 7;
+    private com.google.protobuf.Struct databricksConf_;
+    /**
+     * <pre>
+     * Databricks job configuration.
+     * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+     * </pre>
+     *
+     * <code>.google.protobuf.Struct databricksConf = 7;</code>
+     */
+    public boolean hasDatabricksConf() {
+      return databricksConf_ != null;
+    }
+    /**
+     * <pre>
+     * Databricks job configuration.
+     * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+     * </pre>
+     *
+     * <code>.google.protobuf.Struct databricksConf = 7;</code>
+     */
+    public com.google.protobuf.Struct getDatabricksConf() {
+      return databricksConf_ == null ? com.google.protobuf.Struct.getDefaultInstance() : databricksConf_;
+    }
+    /**
+     * <pre>
+     * Databricks job configuration.
+     * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+     * </pre>
+     *
+     * <code>.google.protobuf.Struct databricksConf = 7;</code>
+     */
+    public com.google.protobuf.StructOrBuilder getDatabricksConfOrBuilder() {
+      return getDatabricksConf();
+    }
+
+    public static final int DATABRICKSTOKEN_FIELD_NUMBER = 8;
+    private volatile java.lang.Object databricksToken_;
+    /**
+     * <pre>
+     * Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html
+     * This token can be set in either flytepropeller or flytekit.
+     * </pre>
+     *
+     * <code>string databricksToken = 8;</code>
+     */
+    public java.lang.String getDatabricksToken() {
+      java.lang.Object ref = databricksToken_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        databricksToken_ = s;
+        return s;
+      }
+    }
+    /**
+     * <pre>
+     * Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html
+     * This token can be set in either flytepropeller or flytekit.
+     * </pre>
+     *
+     * <code>string databricksToken = 8;</code>
+     */
+    public com.google.protobuf.ByteString
+        getDatabricksTokenBytes() {
+      java.lang.Object ref = databricksToken_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b =
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        databricksToken_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    public static final int DATABRICKSINSTANCE_FIELD_NUMBER = 9;
+    private volatile java.lang.Object databricksInstance_;
+    /**
+     * <pre>
+     * Domain name of your deployment. Use the form <account>.cloud.databricks.com.
+     * This instance name can be set in either flytepropeller or flytekit.
+     * </pre>
+     *
+     * <code>string databricksInstance = 9;</code>
+     */
+    public java.lang.String getDatabricksInstance() {
+      java.lang.Object ref = databricksInstance_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs =
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        databricksInstance_ = s;
+        return s;
+      }
+    }
+    /**
+     * <pre>
+     * Domain name of your deployment. Use the form <account>.cloud.databricks.com.
+     * This instance name can be set in either flytepropeller or flytekit.
+     * </pre>
+ * + * string databricksInstance = 9; + */ + public com.google.protobuf.ByteString + getDatabricksInstanceBytes() { + java.lang.Object ref = databricksInstance_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + databricksInstance_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -1121,6 +1340,15 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getExecutorPathBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, executorPath_); } + if (databricksConf_ != null) { + output.writeMessage(7, getDatabricksConf()); + } + if (!getDatabricksTokenBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 8, databricksToken_); + } + if (!getDatabricksInstanceBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 9, databricksInstance_); + } unknownFields.writeTo(output); } @@ -1163,6 +1391,16 @@ public int getSerializedSize() { if (!getExecutorPathBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, executorPath_); } + if (databricksConf_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, getDatabricksConf()); + } + if (!getDatabricksTokenBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, databricksToken_); + } + if (!getDatabricksInstanceBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, databricksInstance_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -1189,6 +1427,15 @@ public boolean equals(final java.lang.Object obj) { other.internalGetHadoopConf())) return false; if (!getExecutorPath() .equals(other.getExecutorPath())) return false; + if (hasDatabricksConf() != other.hasDatabricksConf()) return false; + if (hasDatabricksConf()) { + if (!getDatabricksConf() + .equals(other.getDatabricksConf())) return false; + } + if (!getDatabricksToken() + .equals(other.getDatabricksToken())) return false; + if (!getDatabricksInstance() + .equals(other.getDatabricksInstance())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -1216,6 +1463,14 @@ public int hashCode() { } hash = (37 * hash) + EXECUTORPATH_FIELD_NUMBER; hash = (53 * hash) + getExecutorPath().hashCode(); + if (hasDatabricksConf()) { + hash = (37 * hash) + DATABRICKSCONF_FIELD_NUMBER; + hash = (53 * hash) + getDatabricksConf().hashCode(); + } + hash = (37 * hash) + DATABRICKSTOKEN_FIELD_NUMBER; + hash = (53 * hash) + getDatabricksToken().hashCode(); + hash = (37 * hash) + DATABRICKSINSTANCE_FIELD_NUMBER; + hash = (53 * hash) + getDatabricksInstance().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -1389,6 +1644,16 @@ public Builder clear() { internalGetMutableHadoopConf().clear(); executorPath_ = ""; + if (databricksConfBuilder_ == null) { + databricksConf_ = null; + } else { + databricksConf_ = null; + databricksConfBuilder_ = null; + } + databricksToken_ = ""; + + databricksInstance_ = ""; + return this; } @@ -1425,6 +1690,13 @@ public flyteidl.plugins.Spark.SparkJob buildPartial() { result.hadoopConf_ = internalGetHadoopConf(); result.hadoopConf_.makeImmutable(); result.executorPath_ = executorPath_; + if 
(databricksConfBuilder_ == null) { + result.databricksConf_ = databricksConf_; + } else { + result.databricksConf_ = databricksConfBuilder_.build(); + } + result.databricksToken_ = databricksToken_; + result.databricksInstance_ = databricksInstance_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -1493,6 +1765,17 @@ public Builder mergeFrom(flyteidl.plugins.Spark.SparkJob other) { executorPath_ = other.executorPath_; onChanged(); } + if (other.hasDatabricksConf()) { + mergeDatabricksConf(other.getDatabricksConf()); + } + if (!other.getDatabricksToken().isEmpty()) { + databricksToken_ = other.databricksToken_; + onChanged(); + } + if (!other.getDatabricksInstance().isEmpty()) { + databricksInstance_ = other.databricksInstance_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -2040,6 +2323,356 @@ public Builder setExecutorPathBytes( onChanged(); return this; } + + private com.google.protobuf.Struct databricksConf_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Struct, com.google.protobuf.Struct.Builder, com.google.protobuf.StructOrBuilder> databricksConfBuilder_; + /** + *
+       * Databricks job configuration.
+       * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+       * </pre>
+       *
+       * <code>.google.protobuf.Struct databricksConf = 7;</code>
+       */
+      public boolean hasDatabricksConf() {
+        return databricksConfBuilder_ != null || databricksConf_ != null;
+      }
+      /**
+       * <pre>
+       * Databricks job configuration.
+       * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+       * </pre>
+       *
+       * <code>.google.protobuf.Struct databricksConf = 7;</code>
+       */
+      public com.google.protobuf.Struct getDatabricksConf() {
+        if (databricksConfBuilder_ == null) {
+          return databricksConf_ == null ? com.google.protobuf.Struct.getDefaultInstance() : databricksConf_;
+        } else {
+          return databricksConfBuilder_.getMessage();
+        }
+      }
+      /**
+       * <pre>
+       * Databricks job configuration.
+       * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+       * </pre>
+       *
+       * <code>.google.protobuf.Struct databricksConf = 7;</code>
+       */
+      public Builder setDatabricksConf(com.google.protobuf.Struct value) {
+        if (databricksConfBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          databricksConf_ = value;
+          onChanged();
+        } else {
+          databricksConfBuilder_.setMessage(value);
+        }
+
+        return this;
+      }
+      /**
+       * <pre>
+       * Databricks job configuration.
+       * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+       * </pre>
+       *
+       * <code>.google.protobuf.Struct databricksConf = 7;</code>
+       */
+      public Builder setDatabricksConf(
+          com.google.protobuf.Struct.Builder builderForValue) {
+        if (databricksConfBuilder_ == null) {
+          databricksConf_ = builderForValue.build();
+          onChanged();
+        } else {
+          databricksConfBuilder_.setMessage(builderForValue.build());
+        }
+
+        return this;
+      }
+      /**
+       * <pre>
+       * Databricks job configuration.
+       * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+       * </pre>
+       *
+       * <code>.google.protobuf.Struct databricksConf = 7;</code>
+       */
+      public Builder mergeDatabricksConf(com.google.protobuf.Struct value) {
+        if (databricksConfBuilder_ == null) {
+          if (databricksConf_ != null) {
+            databricksConf_ =
+              com.google.protobuf.Struct.newBuilder(databricksConf_).mergeFrom(value).buildPartial();
+          } else {
+            databricksConf_ = value;
+          }
+          onChanged();
+        } else {
+          databricksConfBuilder_.mergeFrom(value);
+        }
+
+        return this;
+      }
+      /**
+       * <pre>
+       * Databricks job configuration.
+       * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+       * </pre>
+       *
+       * <code>.google.protobuf.Struct databricksConf = 7;</code>
+       */
+      public Builder clearDatabricksConf() {
+        if (databricksConfBuilder_ == null) {
+          databricksConf_ = null;
+          onChanged();
+        } else {
+          databricksConf_ = null;
+          databricksConfBuilder_ = null;
+        }
+
+        return this;
+      }
+      /**
+       * <pre>
+       * Databricks job configuration.
+       * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+       * </pre>
+       *
+       * <code>.google.protobuf.Struct databricksConf = 7;</code>
+       */
+      public com.google.protobuf.Struct.Builder getDatabricksConfBuilder() {
+
+        onChanged();
+        return getDatabricksConfFieldBuilder().getBuilder();
+      }
+      /**
+       * <pre>
+       * Databricks job configuration.
+       * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+       * </pre>
+       *
+       * <code>.google.protobuf.Struct databricksConf = 7;</code>
+       */
+      public com.google.protobuf.StructOrBuilder getDatabricksConfOrBuilder() {
+        if (databricksConfBuilder_ != null) {
+          return databricksConfBuilder_.getMessageOrBuilder();
+        } else {
+          return databricksConf_ == null ?
+              com.google.protobuf.Struct.getDefaultInstance() : databricksConf_;
+        }
+      }
+      /**
+       * <pre>
+       * Databricks job configuration.
+       * Config structure can be found here. https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+       * </pre>
+       *
+       * <code>.google.protobuf.Struct databricksConf = 7;</code>
+       */
+      private com.google.protobuf.SingleFieldBuilderV3<
+          com.google.protobuf.Struct, com.google.protobuf.Struct.Builder, com.google.protobuf.StructOrBuilder>
+          getDatabricksConfFieldBuilder() {
+        if (databricksConfBuilder_ == null) {
+          databricksConfBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
+              com.google.protobuf.Struct, com.google.protobuf.Struct.Builder, com.google.protobuf.StructOrBuilder>(
+                  getDatabricksConf(),
+                  getParentForChildren(),
+                  isClean());
+          databricksConf_ = null;
+        }
+        return databricksConfBuilder_;
+      }
+
+      private java.lang.Object databricksToken_ = "";
+      /**
+       * <pre>
+       * Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html
+       * This token can be set in either flytepropeller or flytekit.
+       * </pre>
+       *
+       * <code>string databricksToken = 8;</code>
+       */
+      public java.lang.String getDatabricksToken() {
+        java.lang.Object ref = databricksToken_;
+        if (!(ref instanceof java.lang.String)) {
+          com.google.protobuf.ByteString bs =
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          databricksToken_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <pre>
+       * Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html
+       * This token can be set in either flytepropeller or flytekit.
+       * </pre>
+       *
+       * <code>string databricksToken = 8;</code>
+       */
+      public com.google.protobuf.ByteString
+          getDatabricksTokenBytes() {
+        java.lang.Object ref = databricksToken_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b =
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          databricksToken_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <pre>
+       * Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html
+       * This token can be set in either flytepropeller or flytekit.
+       * </pre>
+       *
+       * <code>string databricksToken = 8;</code>
+       */
+      public Builder setDatabricksToken(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+
+        databricksToken_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <pre>
+       * Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html
+       * This token can be set in either flytepropeller or flytekit.
+       * 
+       *
+       * <code>string databricksToken = 8;</code>
+       */
+      public Builder clearDatabricksToken() {
+
+        databricksToken_ = getDefaultInstance().getDatabricksToken();
+        onChanged();
+        return this;
+      }
+      /**
+       * <pre>
+       * Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html
+       * This token can be set in either flytepropeller or flytekit.
+       * </pre>
+       *
+       * <code>string databricksToken = 8;</code>
+       */
+      public Builder setDatabricksTokenBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        checkByteStringIsUtf8(value);
+
+        databricksToken_ = value;
+        onChanged();
+        return this;
+      }
+
+      private java.lang.Object databricksInstance_ = "";
+      /**
+       * <pre>
+       * Domain name of your deployment. Use the form <account>.cloud.databricks.com.
+       * This instance name can be set in either flytepropeller or flytekit.
+       * </pre>
+       *
+       * <code>string databricksInstance = 9;</code>
+       */
+      public java.lang.String getDatabricksInstance() {
+        java.lang.Object ref = databricksInstance_;
+        if (!(ref instanceof java.lang.String)) {
+          com.google.protobuf.ByteString bs =
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          databricksInstance_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <pre>
+       * Domain name of your deployment. Use the form <account>.cloud.databricks.com.
+       * This instance name can be set in either flytepropeller or flytekit.
+       * </pre>
+       *
+       * <code>string databricksInstance = 9;</code>
+       */
+      public com.google.protobuf.ByteString
+          getDatabricksInstanceBytes() {
+        java.lang.Object ref = databricksInstance_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          databricksInstance_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <pre>
+       * Domain name of your deployment. Use the form <account>.cloud.databricks.com.
+       * This instance name can be set in either flytepropeller or flytekit.
+       * </pre>
+       *
+       * <code>string databricksInstance = 9;</code>
+       */
+      public Builder setDatabricksInstance(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+
+        databricksInstance_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <pre>
+       * Domain name of your deployment. Use the form <account>.cloud.databricks.com.
+       * This instance name can be set in either flytepropeller or flytekit.
+       * </pre>
+       *
+       * <code>string databricksInstance = 9;</code>
+       */
+      public Builder clearDatabricksInstance() {
+
+        databricksInstance_ = getDefaultInstance().getDatabricksInstance();
+        onChanged();
+        return this;
+      }
+      /**
+       * <pre>
+       * Domain name of your deployment. Use the form <account>.cloud.databricks.com.
+       * This instance name can be set in either flytepropeller or flytekit.
+       * </pre>
+       *
+       * <code>string databricksInstance = 9;</code>
+       */
+      public Builder setDatabricksInstanceBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        checkByteStringIsUtf8(value);
+
+        databricksInstance_ = value;
+        onChanged();
+        return this;
+      }
       @java.lang.Override
       public final Builder setUnknownFields(
           final com.google.protobuf.UnknownFieldSet unknownFields) {
@@ -2123,19 +2756,23 @@ public flyteidl.plugins.Spark.SparkJob getDefaultInstanceForType() {
   static {
     java.lang.String[] descriptorData = {
       "\n\034flyteidl/plugins/spark.proto\022\020flyteidl" +
-      ".plugins\"B\n\020SparkApplication\".\n\004Type\022\n\n\006" +
-      "PYTHON\020\000\022\010\n\004JAVA\020\001\022\t\n\005SCALA\020\002\022\005\n\001R\020\003\"\365\002\n" +
-      "\010SparkJob\022@\n\017applicationType\030\001 \001(\0162\'.fly" +
-      "teidl.plugins.SparkApplication.Type\022\033\n\023m" +
-      "ainApplicationFile\030\002 \001(\t\022\021\n\tmainClass\030\003 " +
-      "\001(\t\022<\n\tsparkConf\030\004 \003(\0132).flyteidl.plugin" +
-      "s.SparkJob.SparkConfEntry\022>\n\nhadoopConf\030" +
-      "\005 \003(\0132*.flyteidl.plugins.SparkJob.Hadoop" +
-      "ConfEntry\022\024\n\014executorPath\030\006 \001(\t\0320\n\016Spark" +
-      "ConfEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028" +
-      "\001\0321\n\017HadoopConfEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005val" +
-      "ue\030\002 \001(\t:\0028\001B9Z7github.com/flyteorg/flyt" +
-      "eidl/gen/pb-go/flyteidl/pluginsb\006proto3"
+      ".plugins\032\034google/protobuf/struct.proto\"B" +
+      "\n\020SparkApplication\".\n\004Type\022\n\n\006PYTHON\020\000\022\010" +
+      "\n\004JAVA\020\001\022\t\n\005SCALA\020\002\022\005\n\001R\020\003\"\333\003\n\010SparkJob\022" +
+      "@\n\017applicationType\030\001 \001(\0162\'.flyteidl.plug" +
+      "ins.SparkApplication.Type\022\033\n\023mainApplica" +
+      "tionFile\030\002 \001(\t\022\021\n\tmainClass\030\003 \001(\t\022<\n\tspa" +
+      "rkConf\030\004 \003(\0132).flyteidl.plugins.SparkJob" +
+      ".SparkConfEntry\022>\n\nhadoopConf\030\005 \003(\0132*.fl" +
+      "yteidl.plugins.SparkJob.HadoopConfEntry\022" +
+      "\024\n\014executorPath\030\006 \001(\t\022/\n\016databricksConf\030" +
+      "\007 \001(\0132\027.google.protobuf.Struct\022\027\n\017databr" +
+      "icksToken\030\010 \001(\t\022\032\n\022databricksInstance\030\t " +
+      "\001(\t\0320\n\016SparkConfEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005va" +
+      "lue\030\002 \001(\t:\0028\001\0321\n\017HadoopConfEntry\022\013\n\003key\030" +
+      "\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B9Z7github.com/f" +
+      "lyteorg/flyteidl/gen/pb-go/flyteidl/plug" +
+      "insb\006proto3"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
         new com.google.protobuf.Descriptors.FileDescriptor.
            InternalDescriptorAssigner() {
@@ -2148,6 +2785,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors(
     com.google.protobuf.Descriptors.FileDescriptor
       .internalBuildGeneratedFileFrom(descriptorData,
         new com.google.protobuf.Descriptors.FileDescriptor[] {
+          com.google.protobuf.StructProto.getDescriptor(),
         }, assigner);
     internal_static_flyteidl_plugins_SparkApplication_descriptor =
       getDescriptor().getMessageTypes().get(0);
@@ -2160,7 +2798,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors(
     internal_static_flyteidl_plugins_SparkJob_fieldAccessorTable = new
       com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_flyteidl_plugins_SparkJob_descriptor,
-        new java.lang.String[] { "ApplicationType", "MainApplicationFile", "MainClass", "SparkConf", "HadoopConf", "ExecutorPath", });
+        new java.lang.String[] { "ApplicationType", "MainApplicationFile", "MainClass", "SparkConf", "HadoopConf", "ExecutorPath", "DatabricksConf", "DatabricksToken", "DatabricksInstance", });
     internal_static_flyteidl_plugins_SparkJob_SparkConfEntry_descriptor =
       internal_static_flyteidl_plugins_SparkJob_descriptor.getNestedTypes().get(0);
     internal_static_flyteidl_plugins_SparkJob_SparkConfEntry_fieldAccessorTable = new
@@ -2173,6 +2811,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors(
       com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
         internal_static_flyteidl_plugins_SparkJob_HadoopConfEntry_descriptor,
         new java.lang.String[] { "Key", "Value", });
+    com.google.protobuf.StructProto.getDescriptor();
   }
 
   // @@protoc_insertion_point(outer_class_scope)
diff --git a/gen/pb_python/flyteidl/plugins/spark_pb2.py b/gen/pb_python/flyteidl/plugins/spark_pb2.py
index 9369f3434..6db5d5db7 100644
--- a/gen/pb_python/flyteidl/plugins/spark_pb2.py
+++ b/gen/pb_python/flyteidl/plugins/spark_pb2.py
@@ -11,9 +11,10 @@
 _sym_db = _symbol_database.Default()
 
 
+from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
 
 
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/plugins/spark.proto\x12\x10\x66lyteidl.plugins\"B\n\x10SparkApplication\".\n\x04Type\x12\n\n\x06PYTHON\x10\x00\x12\x08\n\x04JAVA\x10\x01\x12\t\n\x05SCALA\x10\x02\x12\x05\n\x01R\x10\x03\"\xe3\x03\n\x08SparkJob\x12Q\n\x0f\x61pplicationType\x18\x01 \x01(\x0e\x32\'.flyteidl.plugins.SparkApplication.TypeR\x0f\x61pplicationType\x12\x30\n\x13mainApplicationFile\x18\x02 \x01(\tR\x13mainApplicationFile\x12\x1c\n\tmainClass\x18\x03 \x01(\tR\tmainClass\x12G\n\tsparkConf\x18\x04 \x03(\x0b\x32).flyteidl.plugins.SparkJob.SparkConfEntryR\tsparkConf\x12J\n\nhadoopConf\x18\x05 \x03(\x0b\x32*.flyteidl.plugins.SparkJob.HadoopConfEntryR\nhadoopConf\x12\"\n\x0c\x65xecutorPath\x18\x06 \x01(\tR\x0c\x65xecutorPath\x1a<\n\x0eSparkConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a=\n\x0fHadoopConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\xbc\x01\n\x14\x63om.flyteidl.pluginsB\nSparkProtoP\x01Z7github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/plugins\xa2\x02\x03\x46PX\xaa\x02\x10\x46lyteidl.Plugins\xca\x02\x10\x46lyteidl\\Plugins\xe2\x02\x1c\x46lyteidl\\Plugins\\GPBMetadata\xea\x02\x11\x46lyteidl::Pluginsb\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/plugins/spark.proto\x12\x10\x66lyteidl.plugins\x1a\x1cgoogle/protobuf/struct.proto\"B\n\x10SparkApplication\".\n\x04Type\x12\n\n\x06PYTHON\x10\x00\x12\x08\n\x04JAVA\x10\x01\x12\t\n\x05SCALA\x10\x02\x12\x05\n\x01R\x10\x03\"\xfe\x04\n\x08SparkJob\x12Q\n\x0f\x61pplicationType\x18\x01 \x01(\x0e\x32\'.flyteidl.plugins.SparkApplication.TypeR\x0f\x61pplicationType\x12\x30\n\x13mainApplicationFile\x18\x02 \x01(\tR\x13mainApplicationFile\x12\x1c\n\tmainClass\x18\x03 \x01(\tR\tmainClass\x12G\n\tsparkConf\x18\x04 \x03(\x0b\x32).flyteidl.plugins.SparkJob.SparkConfEntryR\tsparkConf\x12J\n\nhadoopConf\x18\x05 \x03(\x0b\x32*.flyteidl.plugins.SparkJob.HadoopConfEntryR\nhadoopConf\x12\"\n\x0c\x65xecutorPath\x18\x06 \x01(\tR\x0c\x65xecutorPath\x12?\n\x0e\x64\x61tabricksConf\x18\x07 \x01(\x0b\x32\x17.google.protobuf.StructR\x0e\x64\x61tabricksConf\x12(\n\x0f\x64\x61tabricksToken\x18\x08 \x01(\tR\x0f\x64\x61tabricksToken\x12.\n\x12\x64\x61tabricksInstance\x18\t \x01(\tR\x12\x64\x61tabricksInstance\x1a<\n\x0eSparkConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a=\n\x0fHadoopConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\xbc\x01\n\x14\x63om.flyteidl.pluginsB\nSparkProtoP\x01Z7github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/plugins\xa2\x02\x03\x46PX\xaa\x02\x10\x46lyteidl.Plugins\xca\x02\x10\x46lyteidl\\Plugins\xe2\x02\x1c\x46lyteidl\\Plugins\\GPBMetadata\xea\x02\x11\x46lyteidl::Pluginsb\x06proto3')
 
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
 _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'flyteidl.plugins.spark_pb2', globals())
@@ -25,14 +26,14 @@
   _SPARKJOB_SPARKCONFENTRY._serialized_options = b'8\001'
   _SPARKJOB_HADOOPCONFENTRY._options = None
   _SPARKJOB_HADOOPCONFENTRY._serialized_options = b'8\001'
-  _SPARKAPPLICATION._serialized_start=50
-  _SPARKAPPLICATION._serialized_end=116
-  _SPARKAPPLICATION_TYPE._serialized_start=70
-  _SPARKAPPLICATION_TYPE._serialized_end=116
-  _SPARKJOB._serialized_start=119
-  _SPARKJOB._serialized_end=602
-  _SPARKJOB_SPARKCONFENTRY._serialized_start=479
-  _SPARKJOB_SPARKCONFENTRY._serialized_end=539
-  _SPARKJOB_HADOOPCONFENTRY._serialized_start=541
-  _SPARKJOB_HADOOPCONFENTRY._serialized_end=602
+  _SPARKAPPLICATION._serialized_start=80
+  _SPARKAPPLICATION._serialized_end=146
+  _SPARKAPPLICATION_TYPE._serialized_start=100
+  _SPARKAPPLICATION_TYPE._serialized_end=146
+  _SPARKJOB._serialized_start=149
+  _SPARKJOB._serialized_end=787
+  _SPARKJOB_SPARKCONFENTRY._serialized_start=664
+  _SPARKJOB_SPARKCONFENTRY._serialized_end=724
+  _SPARKJOB_HADOOPCONFENTRY._serialized_start=726
+  _SPARKJOB_HADOOPCONFENTRY._serialized_end=787
 # @@protoc_insertion_point(module_scope)
diff --git a/gen/pb_python/flyteidl/plugins/spark_pb2.pyi b/gen/pb_python/flyteidl/plugins/spark_pb2.pyi
index ea4b93c8f..4c18bd9cf 100644
--- a/gen/pb_python/flyteidl/plugins/spark_pb2.pyi
+++ b/gen/pb_python/flyteidl/plugins/spark_pb2.pyi
@@ -1,3 +1,4 @@
+from google.protobuf import struct_pb2 as _struct_pb2
 from google.protobuf.internal import containers as _containers
 from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper
 from google.protobuf import descriptor as _descriptor
@@ -17,7 +18,7 @@ class SparkApplication(_message.Message):
     def __init__(self) -> None: ...
 
 class SparkJob(_message.Message):
-    __slots__ = ["applicationType", "executorPath", "hadoopConf", "mainApplicationFile", "mainClass", "sparkConf"]
+    __slots__ = ["applicationType", "databricksConf", "databricksInstance", "databricksToken", "executorPath", "hadoopConf", "mainApplicationFile", "mainClass", "sparkConf"]
     class HadoopConfEntry(_message.Message):
         __slots__ = ["key", "value"]
         KEY_FIELD_NUMBER: _ClassVar[int]
@@ -33,15 +34,21 @@ class SparkJob(_message.Message):
         value: str
         def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ...
     APPLICATIONTYPE_FIELD_NUMBER: _ClassVar[int]
+    DATABRICKSCONF_FIELD_NUMBER: _ClassVar[int]
+    DATABRICKSINSTANCE_FIELD_NUMBER: _ClassVar[int]
+    DATABRICKSTOKEN_FIELD_NUMBER: _ClassVar[int]
     EXECUTORPATH_FIELD_NUMBER: _ClassVar[int]
     HADOOPCONF_FIELD_NUMBER: _ClassVar[int]
     MAINAPPLICATIONFILE_FIELD_NUMBER: _ClassVar[int]
     MAINCLASS_FIELD_NUMBER: _ClassVar[int]
     SPARKCONF_FIELD_NUMBER: _ClassVar[int]
     applicationType: SparkApplication.Type
+    databricksConf: _struct_pb2.Struct
+    databricksInstance: str
+    databricksToken: str
     executorPath: str
     hadoopConf: _containers.ScalarMap[str, str]
     mainApplicationFile: str
     mainClass: str
     sparkConf: _containers.ScalarMap[str, str]
-    def __init__(self, applicationType: _Optional[_Union[SparkApplication.Type, str]] = ..., mainApplicationFile: _Optional[str] = ..., mainClass: _Optional[str] = ..., sparkConf: _Optional[_Mapping[str, str]] = ..., hadoopConf: _Optional[_Mapping[str, str]] = ..., executorPath: _Optional[str] = ...) -> None: ...
+    def __init__(self, applicationType: _Optional[_Union[SparkApplication.Type, str]] = ..., mainApplicationFile: _Optional[str] = ..., mainClass: _Optional[str] = ..., sparkConf: _Optional[_Mapping[str, str]] = ..., hadoopConf: _Optional[_Mapping[str, str]] = ..., executorPath: _Optional[str] = ..., databricksConf: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ..., databricksToken: _Optional[str] = ..., databricksInstance: _Optional[str] = ...) -> None: ...
diff --git a/protos/docs/plugins/plugins.rst b/protos/docs/plugins/plugins.rst
index a0a1152c5..995dc7c08 100644
--- a/protos/docs/plugins/plugins.rst
+++ b/protos/docs/plugins/plugins.rst
@@ -595,6 +595,9 @@ Custom Proto for Spark Plugin.
    "sparkConf", ":ref:`ref_flyteidl.plugins.SparkJob.SparkConfEntry`", "repeated", ""
    "hadoopConf", ":ref:`ref_flyteidl.plugins.SparkJob.HadoopConfEntry`", "repeated", ""
    "executorPath", ":ref:`ref_string`", "", "Executor path for Python jobs."
+   "databricksConf", ":ref:`ref_google.protobuf.Struct`", "", "Databricks job configuration. Config structure can be found here: https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure."
+   "databricksToken", ":ref:`ref_string`", "", "Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html This token can be set in either flytepropeller or flytekit."
+   "databricksInstance", ":ref:`ref_string`", "", "Domain name of your deployment. Use the form <account>.cloud.databricks.com. This instance name can be set in either flytepropeller or flytekit."
diff --git a/protos/flyteidl/plugins/spark.proto b/protos/flyteidl/plugins/spark.proto
index 8d008ee6e..6ba00fe05 100644
--- a/protos/flyteidl/plugins/spark.proto
+++ b/protos/flyteidl/plugins/spark.proto
@@ -1,6 +1,7 @@
 syntax = "proto3";
 
 package flyteidl.plugins;
+import "google/protobuf/struct.proto";
 
 option go_package = "github.com/flyteorg/flyteidl/gen/pb-go/flyteidl/plugins";
 
@@ -21,4 +22,13 @@ message SparkJob {
     map<string, string> sparkConf = 4;
     map<string, string> hadoopConf = 5;
     string executorPath = 6; // Executor path for Python jobs.
+    // Databricks job configuration.
+    // Config structure can be found here: https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
+    google.protobuf.Struct databricksConf = 7;
+    // Databricks access token. https://docs.databricks.com/dev-tools/api/latest/authentication.html
+    // This token can be set in either flytepropeller or flytekit.
+    string databricksToken = 8;
+    // Domain name of your deployment. Use the form <account>.cloud.databricks.com.
+    // This instance name can be set in either flytepropeller or flytekit.
+    string databricksInstance = 9;
 }
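
For orientation, a minimal sketch of how the three new fields might be populated from the regenerated Python bindings. The flyteidl.plugins.spark_pb2 import path is taken from this diff; the cluster settings, token, and instance values below are illustrative placeholders only, not part of this change:

    from google.protobuf import struct_pb2
    from flyteidl.plugins import spark_pb2

    # databricksConf carries arbitrary Databricks job settings as a
    # google.protobuf.Struct, mirroring the request structure documented at
    # https://docs.databricks.com/dev-tools/api/2.0/jobs.html#request-structure.
    conf = struct_pb2.Struct()
    conf.update({"new_cluster": {"spark_version": "11.0.x-scala2.12", "num_workers": 4}})

    job = spark_pb2.SparkJob(
        applicationType=spark_pb2.SparkApplication.PYTHON,
        mainApplicationFile="dbfs:/path/to/main.py",  # placeholder path
        databricksConf=conf,
        databricksToken="dapi-placeholder",  # normally supplied via flytekit or flytepropeller
        databricksInstance="myaccount.cloud.databricks.com",
    )

Because the field is a Struct rather than a base64-encoded string, callers no longer need to encode the configuration in flytekit and decode it in the propeller; nested dictionaries convert directly via Struct.update().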