From 6e45b86c3ad33b6795a90f898ff4d753e794f700 Mon Sep 17 00:00:00 2001 From: Andrew Dye Date: Thu, 5 Oct 2023 17:15:30 -0700 Subject: [PATCH 1/2] Add driver and executor pod template fields to SparkJob Signed-off-by: Andrew Dye --- .../gen/pb-cpp/flyteidl/plugins/spark.pb.cc | 445 +++++++- .../gen/pb-cpp/flyteidl/plugins/spark.pb.h | 275 +++++ .../gen/pb-go/flyteidl/plugins/spark.pb.go | 166 ++- .../flyteidl/plugins/spark.pb.validate.go | 36 + .../gen/pb-java/flyteidl/plugins/Spark.java | 1013 ++++++++++++++++- .../pb_python/flyteidl/plugins/spark_pb2.py | 23 +- .../pb_python/flyteidl/plugins/spark_pb2.pyi | 13 +- flyteidl/gen/pb_rust/flyteidl.plugins.rs | 39 + flyteidl/protos/flyteidl/plugins/spark.proto | 24 +- 9 files changed, 1945 insertions(+), 89 deletions(-) diff --git a/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.cc index 8cb82c83b5..dd81d750f9 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.cc @@ -16,6 +16,7 @@ // @@protoc_insertion_point(includes) #include +extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2ftasks_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_K8sPod_flyteidl_2fcore_2ftasks_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fplugins_2fspark_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_SparkJob_HadoopConfEntry_DoNotUse_flyteidl_2fplugins_2fspark_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fplugins_2fspark_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_SparkJob_SparkConfEntry_DoNotUse_flyteidl_2fplugins_2fspark_2eproto; extern PROTOBUF_INTERNAL_EXPORT_google_2fprotobuf_2fstruct_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_ListValue_google_2fprotobuf_2fstruct_2eproto; @@ -36,6 +37,8 @@ class SparkJob_HadoopConfEntry_DoNotUseDefaultTypeInternal { class SparkJobDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed _instance; + const ::flyteidl::core::K8sPod* driverpod_; + const ::flyteidl::core::K8sPod* executorpod_; } _SparkJob_default_instance_; } // namespace plugins } // namespace flyteidl @@ -90,11 +93,12 @@ static void InitDefaultsSparkJob_flyteidl_2fplugins_2fspark_2eproto() { ::flyteidl::plugins::SparkJob::InitAsDefaultInstance(); } -::google::protobuf::internal::SCCInfo<3> scc_info_SparkJob_flyteidl_2fplugins_2fspark_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 3, InitDefaultsSparkJob_flyteidl_2fplugins_2fspark_2eproto}, { +::google::protobuf::internal::SCCInfo<4> scc_info_SparkJob_flyteidl_2fplugins_2fspark_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 4, InitDefaultsSparkJob_flyteidl_2fplugins_2fspark_2eproto}, { &scc_info_SparkJob_SparkConfEntry_DoNotUse_flyteidl_2fplugins_2fspark_2eproto.base, &scc_info_SparkJob_HadoopConfEntry_DoNotUse_flyteidl_2fplugins_2fspark_2eproto.base, - &scc_info_ListValue_google_2fprotobuf_2fstruct_2eproto.base,}}; + &scc_info_ListValue_google_2fprotobuf_2fstruct_2eproto.base, + &scc_info_K8sPod_flyteidl_2fcore_2ftasks_2eproto.base,}}; void InitDefaults_flyteidl_2fplugins_2fspark_2eproto() { ::google::protobuf::internal::InitSCC(&scc_info_SparkApplication_flyteidl_2fplugins_2fspark_2eproto.base); @@ -134,7 +138,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fplugins_2fspark_2eproto: ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, _internal_metadata_), ~0u, // no _extensions_ - ~0u, // no _oneof_case_ + 
PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, _oneof_case_[0]), ~0u, // no _weak_field_map_ PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, applicationtype_), PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, mainapplicationfile_), @@ -145,6 +149,12 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fplugins_2fspark_2eproto: PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, databricksconf_), PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, databrickstoken_), PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, databricksinstance_), + offsetof(::flyteidl::plugins::SparkJobDefaultTypeInternal, driverpod_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, driverpodtemplatename_), + offsetof(::flyteidl::plugins::SparkJobDefaultTypeInternal, executorpod_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, executorpodtemplatename_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, driverPodValue_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, executorPodValue_), }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::plugins::SparkApplication)}, @@ -168,36 +178,42 @@ ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_fl const char descriptor_table_protodef_flyteidl_2fplugins_2fspark_2eproto[] = "\n\034flyteidl/plugins/spark.proto\022\020flyteidl" - ".plugins\032\034google/protobuf/struct.proto\"B" - "\n\020SparkApplication\".\n\004Type\022\n\n\006PYTHON\020\000\022\010" - "\n\004JAVA\020\001\022\t\n\005SCALA\020\002\022\005\n\001R\020\003\"\333\003\n\010SparkJob\022" - "@\n\017applicationType\030\001 \001(\0162\'.flyteidl.plug" - "ins.SparkApplication.Type\022\033\n\023mainApplica" - "tionFile\030\002 \001(\t\022\021\n\tmainClass\030\003 \001(\t\022<\n\tspa" - "rkConf\030\004 \003(\0132).flyteidl.plugins.SparkJob" - ".SparkConfEntry\022>\n\nhadoopConf\030\005 \003(\0132*.fl" - "yteidl.plugins.SparkJob.HadoopConfEntry\022" - "\024\n\014executorPath\030\006 \001(\t\022/\n\016databricksConf\030" - "\007 \001(\0132\027.google.protobuf.Struct\022\027\n\017databr" - "icksToken\030\010 \001(\t\022\032\n\022databricksInstance\030\t " - "\001(\t\0320\n\016SparkConfEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005va" - "lue\030\002 \001(\t:\0028\001\0321\n\017HadoopConfEntry\022\013\n\003key\030" - "\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B\?Z=github.com/f" - "lyteorg/flyte/flyteidl/gen/pb-go/flyteid" - "l/pluginsb\006proto3" + ".plugins\032\031flyteidl/core/tasks.proto\032\034goo" + "gle/protobuf/struct.proto\"B\n\020SparkApplic" + "ation\".\n\004Type\022\n\n\006PYTHON\020\000\022\010\n\004JAVA\020\001\022\t\n\005S" + "CALA\020\002\022\005\n\001R\020\003\"\233\005\n\010SparkJob\022@\n\017applicatio" + "nType\030\001 \001(\0162\'.flyteidl.plugins.SparkAppl" + "ication.Type\022\033\n\023mainApplicationFile\030\002 \001(" + "\t\022\021\n\tmainClass\030\003 \001(\t\022<\n\tsparkConf\030\004 \003(\0132" + ").flyteidl.plugins.SparkJob.SparkConfEnt" + "ry\022>\n\nhadoopConf\030\005 \003(\0132*.flyteidl.plugin" + "s.SparkJob.HadoopConfEntry\022\024\n\014executorPa" + "th\030\006 \001(\t\022/\n\016databricksConf\030\007 \001(\0132\027.googl" + "e.protobuf.Struct\022\027\n\017databricksToken\030\010 \001" + "(\t\022\032\n\022databricksInstance\030\t \001(\t\022*\n\tdriver" + "Pod\030\n \001(\0132\025.flyteidl.core.K8sPodH\000\022\035\n\025dr" + "iverPodTemplateName\030\013 \001(\t\022,\n\013executorPod" + "\030\014 
\001(\0132\025.flyteidl.core.K8sPodH\001\022\037\n\027execu" + "torPodTemplateName\030\r \001(\t\0320\n\016SparkConfEnt" + "ry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\0321\n\017Ha" + "doopConfEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(" + "\t:\0028\001B\020\n\016driverPodValueB\022\n\020executorPodVa" + "lueB\?Z=github.com/flyteorg/flyte/flyteid" + "l/gen/pb-go/flyteidl/pluginsb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fplugins_2fspark_2eproto = { false, InitDefaults_flyteidl_2fplugins_2fspark_2eproto, descriptor_table_protodef_flyteidl_2fplugins_2fspark_2eproto, - "flyteidl/plugins/spark.proto", &assign_descriptors_table_flyteidl_2fplugins_2fspark_2eproto, 697, + "flyteidl/plugins/spark.proto", &assign_descriptors_table_flyteidl_2fplugins_2fspark_2eproto, 916, }; void AddDescriptors_flyteidl_2fplugins_2fspark_2eproto() { - static constexpr ::google::protobuf::internal::InitFunc deps[1] = + static constexpr ::google::protobuf::internal::InitFunc deps[2] = { + ::AddDescriptors_flyteidl_2fcore_2ftasks_2eproto, ::AddDescriptors_google_2fprotobuf_2fstruct_2eproto, }; - ::google::protobuf::internal::AddDescriptors(&descriptor_table_flyteidl_2fplugins_2fspark_2eproto, deps, 1); + ::google::protobuf::internal::AddDescriptors(&descriptor_table_flyteidl_2fplugins_2fspark_2eproto, deps, 2); } // Force running AddDescriptors() at dynamic initialization time. @@ -526,22 +542,76 @@ bool SparkJob_HadoopConfEntry_DoNotUse::_ParseMap(const char* begin, const char* void SparkJob::InitAsDefaultInstance() { ::flyteidl::plugins::_SparkJob_default_instance_._instance.get_mutable()->databricksconf_ = const_cast< ::google::protobuf::Struct*>( ::google::protobuf::Struct::internal_default_instance()); + ::flyteidl::plugins::_SparkJob_default_instance_.driverpod_ = const_cast< ::flyteidl::core::K8sPod*>( + ::flyteidl::core::K8sPod::internal_default_instance()); + ::flyteidl::plugins::_SparkJob_default_instance_.executorpod_ = const_cast< ::flyteidl::core::K8sPod*>( + ::flyteidl::core::K8sPod::internal_default_instance()); } class SparkJob::HasBitSetters { public: static const ::google::protobuf::Struct& databricksconf(const SparkJob* msg); + static const ::flyteidl::core::K8sPod& driverpod(const SparkJob* msg); + static const ::flyteidl::core::K8sPod& executorpod(const SparkJob* msg); }; const ::google::protobuf::Struct& SparkJob::HasBitSetters::databricksconf(const SparkJob* msg) { return *msg->databricksconf_; } +const ::flyteidl::core::K8sPod& +SparkJob::HasBitSetters::driverpod(const SparkJob* msg) { + return *msg->driverPodValue_.driverpod_; +} +const ::flyteidl::core::K8sPod& +SparkJob::HasBitSetters::executorpod(const SparkJob* msg) { + return *msg->executorPodValue_.executorpod_; +} void SparkJob::clear_databricksconf() { if (GetArenaNoVirtual() == nullptr && databricksconf_ != nullptr) { delete databricksconf_; } databricksconf_ = nullptr; } +void SparkJob::set_allocated_driverpod(::flyteidl::core::K8sPod* driverpod) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + clear_driverPodValue(); + if (driverpod) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + driverpod = ::google::protobuf::internal::GetOwnedMessage( + message_arena, driverpod, submessage_arena); + } + set_has_driverpod(); + driverPodValue_.driverpod_ = driverpod; + } + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.driverPod) +} +void 
SparkJob::clear_driverpod() { + if (has_driverpod()) { + delete driverPodValue_.driverpod_; + clear_has_driverPodValue(); + } +} +void SparkJob::set_allocated_executorpod(::flyteidl::core::K8sPod* executorpod) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + clear_executorPodValue(); + if (executorpod) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + executorpod = ::google::protobuf::internal::GetOwnedMessage( + message_arena, executorpod, submessage_arena); + } + set_has_executorpod(); + executorPodValue_.executorpod_ = executorpod; + } + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.executorPod) +} +void SparkJob::clear_executorpod() { + if (has_executorpod()) { + delete executorPodValue_.executorpod_; + clear_has_executorPodValue(); + } +} #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int SparkJob::kApplicationTypeFieldNumber; const int SparkJob::kMainApplicationFileFieldNumber; @@ -552,6 +622,10 @@ const int SparkJob::kExecutorPathFieldNumber; const int SparkJob::kDatabricksConfFieldNumber; const int SparkJob::kDatabricksTokenFieldNumber; const int SparkJob::kDatabricksInstanceFieldNumber; +const int SparkJob::kDriverPodFieldNumber; +const int SparkJob::kDriverPodTemplateNameFieldNumber; +const int SparkJob::kExecutorPodFieldNumber; +const int SparkJob::kExecutorPodTemplateNameFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 SparkJob::SparkJob() @@ -585,12 +659,40 @@ SparkJob::SparkJob(const SparkJob& from) if (from.databricksinstance().size() > 0) { databricksinstance_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.databricksinstance_); } + driverpodtemplatename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.driverpodtemplatename().size() > 0) { + driverpodtemplatename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.driverpodtemplatename_); + } + executorpodtemplatename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.executorpodtemplatename().size() > 0) { + executorpodtemplatename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.executorpodtemplatename_); + } if (from.has_databricksconf()) { databricksconf_ = new ::google::protobuf::Struct(*from.databricksconf_); } else { databricksconf_ = nullptr; } applicationtype_ = from.applicationtype_; + clear_has_driverPodValue(); + switch (from.driverPodValue_case()) { + case kDriverPod: { + mutable_driverpod()->::flyteidl::core::K8sPod::MergeFrom(from.driverpod()); + break; + } + case DRIVERPODVALUE_NOT_SET: { + break; + } + } + clear_has_executorPodValue(); + switch (from.executorPodValue_case()) { + case kExecutorPod: { + mutable_executorpod()->::flyteidl::core::K8sPod::MergeFrom(from.executorpod()); + break; + } + case EXECUTORPODVALUE_NOT_SET: { + break; + } + } // @@protoc_insertion_point(copy_constructor:flyteidl.plugins.SparkJob) } @@ -602,9 +704,13 @@ void SparkJob::SharedCtor() { executorpath_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); databrickstoken_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); databricksinstance_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + driverpodtemplatename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + 
executorpodtemplatename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); ::memset(&databricksconf_, 0, static_cast( reinterpret_cast(&applicationtype_) - reinterpret_cast(&databricksconf_)) + sizeof(applicationtype_)); + clear_has_driverPodValue(); + clear_has_executorPodValue(); } SparkJob::~SparkJob() { @@ -618,7 +724,15 @@ void SparkJob::SharedDtor() { executorpath_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); databrickstoken_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); databricksinstance_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + driverpodtemplatename_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + executorpodtemplatename_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete databricksconf_; + if (has_driverPodValue()) { + clear_driverPodValue(); + } + if (has_executorPodValue()) { + clear_executorPodValue(); + } } void SparkJob::SetCachedSize(int size) const { @@ -630,6 +744,35 @@ const SparkJob& SparkJob::default_instance() { } +void SparkJob::clear_driverPodValue() { +// @@protoc_insertion_point(one_of_clear_start:flyteidl.plugins.SparkJob) + switch (driverPodValue_case()) { + case kDriverPod: { + delete driverPodValue_.driverpod_; + break; + } + case DRIVERPODVALUE_NOT_SET: { + break; + } + } + _oneof_case_[0] = DRIVERPODVALUE_NOT_SET; +} + +void SparkJob::clear_executorPodValue() { +// @@protoc_insertion_point(one_of_clear_start:flyteidl.plugins.SparkJob) + switch (executorPodValue_case()) { + case kExecutorPod: { + delete executorPodValue_.executorpod_; + break; + } + case EXECUTORPODVALUE_NOT_SET: { + break; + } + } + _oneof_case_[1] = EXECUTORPODVALUE_NOT_SET; +} + + void SparkJob::Clear() { // @@protoc_insertion_point(message_clear_start:flyteidl.plugins.SparkJob) ::google::protobuf::uint32 cached_has_bits = 0; @@ -643,11 +786,15 @@ void SparkJob::Clear() { executorpath_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); databrickstoken_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); databricksinstance_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + driverpodtemplatename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + executorpodtemplatename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == nullptr && databricksconf_ != nullptr) { delete databricksconf_; } databricksconf_ = nullptr; applicationtype_ = 0; + clear_driverPodValue(); + clear_executorPodValue(); _internal_metadata_.Clear(); } @@ -803,6 +950,64 @@ const char* SparkJob::_InternalParse(const char* begin, const char* end, void* o ptr += size; break; } + // .flyteidl.core.K8sPod driverPod = 10; + case 10: { + if (static_cast<::google::protobuf::uint8>(tag) != 82) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::flyteidl::core::K8sPod::_InternalParse; + object = msg->mutable_driverpod(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, ptr)); + break; + } + // string driverPodTemplateName = 11; + case 11: { + if (static_cast<::google::protobuf::uint8>(tag) != 90) goto handle_unusual; + ptr 
= ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.plugins.SparkJob.driverPodTemplateName"); + object = msg->mutable_driverpodtemplatename(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } + // .flyteidl.core.K8sPod executorPod = 12; + case 12: { + if (static_cast<::google::protobuf::uint8>(tag) != 98) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::flyteidl::core::K8sPod::_InternalParse; + object = msg->mutable_executorpod(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, ptr)); + break; + } + // string executorPodTemplateName = 13; + case 13: { + if (static_cast<::google::protobuf::uint8>(tag) != 106) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.plugins.SparkJob.executorPodTemplateName"); + object = msg->mutable_executorpodtemplatename(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -989,6 +1194,58 @@ bool SparkJob::MergePartialFromCodedStream( break; } + // .flyteidl.core.K8sPod driverPod = 10; + case 10: { + if (static_cast< ::google::protobuf::uint8>(tag) == (82 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, mutable_driverpod())); + } else { + goto handle_unusual; + } + break; + } + + // string driverPodTemplateName = 11; + case 11: { + if (static_cast< ::google::protobuf::uint8>(tag) == (90 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_driverpodtemplatename())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->driverpodtemplatename().data(), static_cast(this->driverpodtemplatename().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.plugins.SparkJob.driverPodTemplateName")); + } else { + goto handle_unusual; + } + break; + } + + // .flyteidl.core.K8sPod executorPod = 12; + case 12: { + if (static_cast< ::google::protobuf::uint8>(tag) == (98 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, mutable_executorpod())); + } else { + goto handle_unusual; + } + break; + } + + // string executorPodTemplateName = 13; + case 13: { + if (static_cast< ::google::protobuf::uint8>(tag) == (106 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_executorpodtemplatename())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->executorpodtemplatename().data(), static_cast(this->executorpodtemplatename().length()), + 
::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.plugins.SparkJob.executorPodTemplateName")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -1176,6 +1433,38 @@ void SparkJob::SerializeWithCachedSizes( 9, this->databricksinstance(), output); } + // .flyteidl.core.K8sPod driverPod = 10; + if (has_driverpod()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 10, HasBitSetters::driverpod(this), output); + } + + // string driverPodTemplateName = 11; + if (this->driverpodtemplatename().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->driverpodtemplatename().data(), static_cast(this->driverpodtemplatename().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.plugins.SparkJob.driverPodTemplateName"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 11, this->driverpodtemplatename(), output); + } + + // .flyteidl.core.K8sPod executorPod = 12; + if (has_executorpod()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 12, HasBitSetters::executorpod(this), output); + } + + // string executorPodTemplateName = 13; + if (this->executorpodtemplatename().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->executorpodtemplatename().data(), static_cast(this->executorpodtemplatename().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.plugins.SparkJob.executorPodTemplateName"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 13, this->executorpodtemplatename(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -1355,6 +1644,42 @@ ::google::protobuf::uint8* SparkJob::InternalSerializeWithCachedSizesToArray( 9, this->databricksinstance(), target); } + // .flyteidl.core.K8sPod driverPod = 10; + if (has_driverpod()) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 10, HasBitSetters::driverpod(this), target); + } + + // string driverPodTemplateName = 11; + if (this->driverpodtemplatename().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->driverpodtemplatename().data(), static_cast(this->driverpodtemplatename().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.plugins.SparkJob.driverPodTemplateName"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 11, this->driverpodtemplatename(), target); + } + + // .flyteidl.core.K8sPod executorPod = 12; + if (has_executorpod()) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 12, HasBitSetters::executorpod(this), target); + } + + // string executorPodTemplateName = 13; + if (this->executorpodtemplatename().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->executorpodtemplatename().data(), static_cast(this->executorpodtemplatename().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.plugins.SparkJob.executorPodTemplateName"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 13, this->executorpodtemplatename(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( 
_internal_metadata_.unknown_fields(), target); @@ -1439,6 +1764,20 @@ size_t SparkJob::ByteSizeLong() const { this->databricksinstance()); } + // string driverPodTemplateName = 11; + if (this->driverpodtemplatename().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->driverpodtemplatename()); + } + + // string executorPodTemplateName = 13; + if (this->executorpodtemplatename().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->executorpodtemplatename()); + } + // .google.protobuf.Struct databricksConf = 7; if (this->has_databricksconf()) { total_size += 1 + @@ -1452,6 +1791,30 @@ size_t SparkJob::ByteSizeLong() const { ::google::protobuf::internal::WireFormatLite::EnumSize(this->applicationtype()); } + switch (driverPodValue_case()) { + // .flyteidl.core.K8sPod driverPod = 10; + case kDriverPod: { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *driverPodValue_.driverpod_); + break; + } + case DRIVERPODVALUE_NOT_SET: { + break; + } + } + switch (executorPodValue_case()) { + // .flyteidl.core.K8sPod executorPod = 12; + case kExecutorPod: { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *executorPodValue_.executorpod_); + break; + } + case EXECUTORPODVALUE_NOT_SET: { + break; + } + } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; @@ -1501,12 +1864,38 @@ void SparkJob::MergeFrom(const SparkJob& from) { databricksinstance_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.databricksinstance_); } + if (from.driverpodtemplatename().size() > 0) { + + driverpodtemplatename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.driverpodtemplatename_); + } + if (from.executorpodtemplatename().size() > 0) { + + executorpodtemplatename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.executorpodtemplatename_); + } if (from.has_databricksconf()) { mutable_databricksconf()->::google::protobuf::Struct::MergeFrom(from.databricksconf()); } if (from.applicationtype() != 0) { set_applicationtype(from.applicationtype()); } + switch (from.driverPodValue_case()) { + case kDriverPod: { + mutable_driverpod()->::flyteidl::core::K8sPod::MergeFrom(from.driverpod()); + break; + } + case DRIVERPODVALUE_NOT_SET: { + break; + } + } + switch (from.executorPodValue_case()) { + case kExecutorPod: { + mutable_executorpod()->::flyteidl::core::K8sPod::MergeFrom(from.executorpod()); + break; + } + case EXECUTORPODVALUE_NOT_SET: { + break; + } + } } void SparkJob::CopyFrom(const ::google::protobuf::Message& from) { @@ -1546,8 +1935,16 @@ void SparkJob::InternalSwap(SparkJob* other) { GetArenaNoVirtual()); databricksinstance_.Swap(&other->databricksinstance_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + driverpodtemplatename_.Swap(&other->driverpodtemplatename_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + executorpodtemplatename_.Swap(&other->executorpodtemplatename_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(databricksconf_, other->databricksconf_); swap(applicationtype_, other->applicationtype_); + swap(driverPodValue_, other->driverPodValue_); + swap(executorPodValue_, other->executorPodValue_); + swap(_oneof_case_[0], other->_oneof_case_[0]); + 
swap(_oneof_case_[1], other->_oneof_case_[1]); } ::google::protobuf::Metadata SparkJob::GetMetadata() const { diff --git a/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.h b/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.h index 4b60371d82..c6ccc2b036 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.h @@ -35,6 +35,7 @@ #include #include #include +#include "flyteidl/core/tasks.pb.h" #include // @@protoc_insertion_point(includes) #include @@ -320,6 +321,16 @@ class SparkJob final : } static const SparkJob& default_instance(); + enum DriverPodValueCase { + kDriverPod = 10, + DRIVERPODVALUE_NOT_SET = 0, + }; + + enum ExecutorPodValueCase { + kExecutorPod = 12, + EXECUTORPODVALUE_NOT_SET = 0, + }; + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY static inline const SparkJob* internal_default_instance() { return reinterpret_cast( @@ -472,6 +483,34 @@ class SparkJob final : ::std::string* release_databricksinstance(); void set_allocated_databricksinstance(::std::string* databricksinstance); + // string driverPodTemplateName = 11; + void clear_driverpodtemplatename(); + static const int kDriverPodTemplateNameFieldNumber = 11; + const ::std::string& driverpodtemplatename() const; + void set_driverpodtemplatename(const ::std::string& value); + #if LANG_CXX11 + void set_driverpodtemplatename(::std::string&& value); + #endif + void set_driverpodtemplatename(const char* value); + void set_driverpodtemplatename(const char* value, size_t size); + ::std::string* mutable_driverpodtemplatename(); + ::std::string* release_driverpodtemplatename(); + void set_allocated_driverpodtemplatename(::std::string* driverpodtemplatename); + + // string executorPodTemplateName = 13; + void clear_executorpodtemplatename(); + static const int kExecutorPodTemplateNameFieldNumber = 13; + const ::std::string& executorpodtemplatename() const; + void set_executorpodtemplatename(const ::std::string& value); + #if LANG_CXX11 + void set_executorpodtemplatename(::std::string&& value); + #endif + void set_executorpodtemplatename(const char* value); + void set_executorpodtemplatename(const char* value, size_t size); + ::std::string* mutable_executorpodtemplatename(); + ::std::string* release_executorpodtemplatename(); + void set_allocated_executorpodtemplatename(::std::string* executorpodtemplatename); + // .google.protobuf.Struct databricksConf = 7; bool has_databricksconf() const; void clear_databricksconf(); @@ -487,9 +526,39 @@ class SparkJob final : ::flyteidl::plugins::SparkApplication_Type applicationtype() const; void set_applicationtype(::flyteidl::plugins::SparkApplication_Type value); + // .flyteidl.core.K8sPod driverPod = 10; + bool has_driverpod() const; + void clear_driverpod(); + static const int kDriverPodFieldNumber = 10; + const ::flyteidl::core::K8sPod& driverpod() const; + ::flyteidl::core::K8sPod* release_driverpod(); + ::flyteidl::core::K8sPod* mutable_driverpod(); + void set_allocated_driverpod(::flyteidl::core::K8sPod* driverpod); + + // .flyteidl.core.K8sPod executorPod = 12; + bool has_executorpod() const; + void clear_executorpod(); + static const int kExecutorPodFieldNumber = 12; + const ::flyteidl::core::K8sPod& executorpod() const; + ::flyteidl::core::K8sPod* release_executorpod(); + ::flyteidl::core::K8sPod* mutable_executorpod(); + void set_allocated_executorpod(::flyteidl::core::K8sPod* executorpod); + + void clear_driverPodValue(); + DriverPodValueCase driverPodValue_case() const; + void clear_executorPodValue(); + 
ExecutorPodValueCase executorPodValue_case() const; // @@protoc_insertion_point(class_scope:flyteidl.plugins.SparkJob) private: class HasBitSetters; + void set_has_driverpod(); + void set_has_executorpod(); + + inline bool has_driverPodValue() const; + inline void clear_has_driverPodValue(); + + inline bool has_executorPodValue() const; + inline void clear_has_executorPodValue(); ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::MapField< @@ -509,9 +578,21 @@ class SparkJob final : ::google::protobuf::internal::ArenaStringPtr executorpath_; ::google::protobuf::internal::ArenaStringPtr databrickstoken_; ::google::protobuf::internal::ArenaStringPtr databricksinstance_; + ::google::protobuf::internal::ArenaStringPtr driverpodtemplatename_; + ::google::protobuf::internal::ArenaStringPtr executorpodtemplatename_; ::google::protobuf::Struct* databricksconf_; int applicationtype_; + union DriverPodValueUnion { + DriverPodValueUnion() {} + ::flyteidl::core::K8sPod* driverpod_; + } driverPodValue_; + union ExecutorPodValueUnion { + ExecutorPodValueUnion() {} + ::flyteidl::core::K8sPod* executorpod_; + } executorPodValue_; mutable ::google::protobuf::internal::CachedSize _cached_size_; + ::google::protobuf::uint32 _oneof_case_[2]; + friend struct ::TableStruct_flyteidl_2fplugins_2fspark_2eproto; }; // =================================================================== @@ -894,6 +975,200 @@ inline void SparkJob::set_allocated_databricksinstance(::std::string* databricks // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.databricksInstance) } +// .flyteidl.core.K8sPod driverPod = 10; +inline bool SparkJob::has_driverpod() const { + return driverPodValue_case() == kDriverPod; +} +inline void SparkJob::set_has_driverpod() { + _oneof_case_[0] = kDriverPod; +} +inline ::flyteidl::core::K8sPod* SparkJob::release_driverpod() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.SparkJob.driverPod) + if (has_driverpod()) { + clear_has_driverPodValue(); + ::flyteidl::core::K8sPod* temp = driverPodValue_.driverpod_; + driverPodValue_.driverpod_ = nullptr; + return temp; + } else { + return nullptr; + } +} +inline const ::flyteidl::core::K8sPod& SparkJob::driverpod() const { + // @@protoc_insertion_point(field_get:flyteidl.plugins.SparkJob.driverPod) + return has_driverpod() + ? 
*driverPodValue_.driverpod_ + : *reinterpret_cast< ::flyteidl::core::K8sPod*>(&::flyteidl::core::_K8sPod_default_instance_); +} +inline ::flyteidl::core::K8sPod* SparkJob::mutable_driverpod() { + if (!has_driverpod()) { + clear_driverPodValue(); + set_has_driverpod(); + driverPodValue_.driverpod_ = CreateMaybeMessage< ::flyteidl::core::K8sPod >( + GetArenaNoVirtual()); + } + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.SparkJob.driverPod) + return driverPodValue_.driverpod_; +} + +// string driverPodTemplateName = 11; +inline void SparkJob::clear_driverpodtemplatename() { + driverpodtemplatename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& SparkJob::driverpodtemplatename() const { + // @@protoc_insertion_point(field_get:flyteidl.plugins.SparkJob.driverPodTemplateName) + return driverpodtemplatename_.GetNoArena(); +} +inline void SparkJob::set_driverpodtemplatename(const ::std::string& value) { + + driverpodtemplatename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.plugins.SparkJob.driverPodTemplateName) +} +#if LANG_CXX11 +inline void SparkJob::set_driverpodtemplatename(::std::string&& value) { + + driverpodtemplatename_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.plugins.SparkJob.driverPodTemplateName) +} +#endif +inline void SparkJob::set_driverpodtemplatename(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + driverpodtemplatename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.plugins.SparkJob.driverPodTemplateName) +} +inline void SparkJob::set_driverpodtemplatename(const char* value, size_t size) { + + driverpodtemplatename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.plugins.SparkJob.driverPodTemplateName) +} +inline ::std::string* SparkJob::mutable_driverpodtemplatename() { + + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.SparkJob.driverPodTemplateName) + return driverpodtemplatename_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* SparkJob::release_driverpodtemplatename() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.SparkJob.driverPodTemplateName) + + return driverpodtemplatename_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void SparkJob::set_allocated_driverpodtemplatename(::std::string* driverpodtemplatename) { + if (driverpodtemplatename != nullptr) { + + } else { + + } + driverpodtemplatename_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), driverpodtemplatename); + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.driverPodTemplateName) +} + +// .flyteidl.core.K8sPod executorPod = 12; +inline bool SparkJob::has_executorpod() const { + return executorPodValue_case() == kExecutorPod; +} +inline void SparkJob::set_has_executorpod() { + _oneof_case_[1] = kExecutorPod; +} +inline ::flyteidl::core::K8sPod* SparkJob::release_executorpod() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.SparkJob.executorPod) + if (has_executorpod()) { + clear_has_executorPodValue(); 
+ ::flyteidl::core::K8sPod* temp = executorPodValue_.executorpod_; + executorPodValue_.executorpod_ = nullptr; + return temp; + } else { + return nullptr; + } +} +inline const ::flyteidl::core::K8sPod& SparkJob::executorpod() const { + // @@protoc_insertion_point(field_get:flyteidl.plugins.SparkJob.executorPod) + return has_executorpod() + ? *executorPodValue_.executorpod_ + : *reinterpret_cast< ::flyteidl::core::K8sPod*>(&::flyteidl::core::_K8sPod_default_instance_); +} +inline ::flyteidl::core::K8sPod* SparkJob::mutable_executorpod() { + if (!has_executorpod()) { + clear_executorPodValue(); + set_has_executorpod(); + executorPodValue_.executorpod_ = CreateMaybeMessage< ::flyteidl::core::K8sPod >( + GetArenaNoVirtual()); + } + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.SparkJob.executorPod) + return executorPodValue_.executorpod_; +} + +// string executorPodTemplateName = 13; +inline void SparkJob::clear_executorpodtemplatename() { + executorpodtemplatename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& SparkJob::executorpodtemplatename() const { + // @@protoc_insertion_point(field_get:flyteidl.plugins.SparkJob.executorPodTemplateName) + return executorpodtemplatename_.GetNoArena(); +} +inline void SparkJob::set_executorpodtemplatename(const ::std::string& value) { + + executorpodtemplatename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.plugins.SparkJob.executorPodTemplateName) +} +#if LANG_CXX11 +inline void SparkJob::set_executorpodtemplatename(::std::string&& value) { + + executorpodtemplatename_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.plugins.SparkJob.executorPodTemplateName) +} +#endif +inline void SparkJob::set_executorpodtemplatename(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + executorpodtemplatename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.plugins.SparkJob.executorPodTemplateName) +} +inline void SparkJob::set_executorpodtemplatename(const char* value, size_t size) { + + executorpodtemplatename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.plugins.SparkJob.executorPodTemplateName) +} +inline ::std::string* SparkJob::mutable_executorpodtemplatename() { + + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.SparkJob.executorPodTemplateName) + return executorpodtemplatename_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* SparkJob::release_executorpodtemplatename() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.SparkJob.executorPodTemplateName) + + return executorpodtemplatename_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void SparkJob::set_allocated_executorpodtemplatename(::std::string* executorpodtemplatename) { + if (executorpodtemplatename != nullptr) { + + } else { + + } + executorpodtemplatename_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), executorpodtemplatename); + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.executorPodTemplateName) +} + +inline bool 
SparkJob::has_driverPodValue() const { + return driverPodValue_case() != DRIVERPODVALUE_NOT_SET; +} +inline void SparkJob::clear_has_driverPodValue() { + _oneof_case_[0] = DRIVERPODVALUE_NOT_SET; +} +inline bool SparkJob::has_executorPodValue() const { + return executorPodValue_case() != EXECUTORPODVALUE_NOT_SET; +} +inline void SparkJob::clear_has_executorPodValue() { + _oneof_case_[1] = EXECUTORPODVALUE_NOT_SET; +} +inline SparkJob::DriverPodValueCase SparkJob::driverPodValue_case() const { + return SparkJob::DriverPodValueCase(_oneof_case_[0]); +} +inline SparkJob::ExecutorPodValueCase SparkJob::executorPodValue_case() const { + return SparkJob::ExecutorPodValueCase(_oneof_case_[1]); +} #ifdef __GNUC__ #pragma GCC diagnostic pop #endif // __GNUC__ diff --git a/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.go b/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.go index 1825fa266a..f755336b82 100644 --- a/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.go @@ -5,6 +5,7 @@ package plugins import ( fmt "fmt" + core "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" proto "github.com/golang/protobuf/proto" _struct "github.com/golang/protobuf/ptypes/struct" math "math" @@ -99,10 +100,32 @@ type SparkJob struct { DatabricksToken string `protobuf:"bytes,8,opt,name=databricksToken,proto3" json:"databricksToken,omitempty"` // Domain name of your deployment. Use the form .cloud.databricks.com. // This instance name can be set in either flytepropeller or flytekit. - DatabricksInstance string `protobuf:"bytes,9,opt,name=databricksInstance,proto3" json:"databricksInstance,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + DatabricksInstance string `protobuf:"bytes,9,opt,name=databricksInstance,proto3" json:"databricksInstance,omitempty"` + // The pod spec and metadata to be used as the base configuration when creating the driver Pod for this task. + // +optional + // + // Types that are valid to be assigned to DriverPodValue: + // *SparkJob_DriverPod + DriverPodValue isSparkJob_DriverPodValue `protobuf_oneof:"driverPodValue"` + // Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the + // driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied + // identically as, the default PodTemplate configured in FlytePropeller. + // +optional + DriverPodTemplateName string `protobuf:"bytes,11,opt,name=driverPodTemplateName,proto3" json:"driverPodTemplateName,omitempty"` + // The pod spec and metadata to be used as the base configuration when creating the executor Pods for this task. + // +optional + // + // Types that are valid to be assigned to ExecutorPodValue: + // *SparkJob_ExecutorPod + ExecutorPodValue isSparkJob_ExecutorPodValue `protobuf_oneof:"executorPodValue"` + // Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the + // executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied + // identically as, the default PodTemplate configured in FlytePropeller. 
+ // +optional + ExecutorPodTemplateName string `protobuf:"bytes,13,opt,name=executorPodTemplateName,proto3" json:"executorPodTemplateName,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *SparkJob) Reset() { *m = SparkJob{} } @@ -193,6 +216,76 @@ func (m *SparkJob) GetDatabricksInstance() string { return "" } +type isSparkJob_DriverPodValue interface { + isSparkJob_DriverPodValue() +} + +type SparkJob_DriverPod struct { + DriverPod *core.K8SPod `protobuf:"bytes,10,opt,name=driverPod,proto3,oneof"` +} + +func (*SparkJob_DriverPod) isSparkJob_DriverPodValue() {} + +func (m *SparkJob) GetDriverPodValue() isSparkJob_DriverPodValue { + if m != nil { + return m.DriverPodValue + } + return nil +} + +func (m *SparkJob) GetDriverPod() *core.K8SPod { + if x, ok := m.GetDriverPodValue().(*SparkJob_DriverPod); ok { + return x.DriverPod + } + return nil +} + +func (m *SparkJob) GetDriverPodTemplateName() string { + if m != nil { + return m.DriverPodTemplateName + } + return "" +} + +type isSparkJob_ExecutorPodValue interface { + isSparkJob_ExecutorPodValue() +} + +type SparkJob_ExecutorPod struct { + ExecutorPod *core.K8SPod `protobuf:"bytes,12,opt,name=executorPod,proto3,oneof"` +} + +func (*SparkJob_ExecutorPod) isSparkJob_ExecutorPodValue() {} + +func (m *SparkJob) GetExecutorPodValue() isSparkJob_ExecutorPodValue { + if m != nil { + return m.ExecutorPodValue + } + return nil +} + +func (m *SparkJob) GetExecutorPod() *core.K8SPod { + if x, ok := m.GetExecutorPodValue().(*SparkJob_ExecutorPod); ok { + return x.ExecutorPod + } + return nil +} + +func (m *SparkJob) GetExecutorPodTemplateName() string { + if m != nil { + return m.ExecutorPodTemplateName + } + return "" +} + +// XXX_OneofWrappers is for the internal use of the proto package. 
+func (*SparkJob) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*SparkJob_DriverPod)(nil), + (*SparkJob_ExecutorPod)(nil), + } +} + func init() { proto.RegisterEnum("flyteidl.plugins.SparkApplication_Type", SparkApplication_Type_name, SparkApplication_Type_value) proto.RegisterType((*SparkApplication)(nil), "flyteidl.plugins.SparkApplication") @@ -204,33 +297,40 @@ func init() { func init() { proto.RegisterFile("flyteidl/plugins/spark.proto", fileDescriptor_ca8a069b9820144a) } var fileDescriptor_ca8a069b9820144a = []byte{ - // 443 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0xdf, 0x8b, 0xd3, 0x40, - 0x10, 0x36, 0xfd, 0x65, 0x33, 0x27, 0x6d, 0x18, 0x05, 0x43, 0xe9, 0x43, 0xe9, 0x8b, 0x51, 0x70, - 0x23, 0xf5, 0x45, 0xc4, 0xe3, 0xc8, 0x15, 0xf5, 0x2c, 0xa2, 0x67, 0x5a, 0x04, 0x7d, 0xdb, 0xa4, - 0xdb, 0x34, 0x74, 0x6f, 0x77, 0x49, 0x36, 0x62, 0xfe, 0x79, 0x91, 0x6c, 0xec, 0xe5, 0x2e, 0x9c, - 0x82, 0x6f, 0xb3, 0xdf, 0x7c, 0xf3, 0xcd, 0xf0, 0x7d, 0x2c, 0x4c, 0x77, 0xbc, 0xd4, 0x2c, 0xdd, - 0x72, 0x5f, 0xf1, 0x22, 0x49, 0x45, 0xee, 0xe7, 0x8a, 0x66, 0x07, 0xa2, 0x32, 0xa9, 0x25, 0x3a, - 0xc7, 0x2e, 0xf9, 0xd3, 0x9d, 0x4c, 0x13, 0x29, 0x13, 0xce, 0x7c, 0xd3, 0x8f, 0x8a, 0x9d, 0x9f, - 0xeb, 0xac, 0x88, 0x75, 0xcd, 0x9f, 0x9f, 0x83, 0xb3, 0xae, 0xc6, 0x03, 0xa5, 0x78, 0x1a, 0x53, - 0x9d, 0x4a, 0x31, 0x27, 0xd0, 0xdb, 0x94, 0x8a, 0x21, 0xc0, 0xe0, 0xf2, 0xdb, 0xe6, 0xe2, 0xf3, - 0x27, 0xe7, 0x1e, 0x0e, 0xa1, 0xb7, 0x0a, 0xbe, 0x06, 0x8e, 0x85, 0x36, 0xf4, 0xd7, 0xcb, 0xe0, - 0x63, 0xe0, 0x74, 0xb0, 0x0f, 0x56, 0xe8, 0x74, 0xe7, 0xbf, 0x7a, 0x30, 0x34, 0x22, 0x2b, 0x19, - 0xe1, 0x17, 0x18, 0xd3, 0x46, 0xab, 0xd2, 0x71, 0xad, 0x99, 0xe5, 0x8d, 0x16, 0x4f, 0x48, 0xfb, - 0x34, 0xd2, 0xde, 0x4c, 0x2a, 0x7a, 0xd8, 0x9e, 0xc7, 0x17, 0xf0, 0xf0, 0x8a, 0xa6, 0xe2, 0x06, - 0xf1, 0x5d, 0xca, 0x99, 0xdb, 0x99, 0x59, 0x9e, 0x1d, 0xde, 0xd5, 0xc2, 0x29, 0xd8, 0x15, 0xbc, - 0xe4, 0x34, 0xcf, 0xdd, 0xae, 0xe1, 0x35, 0x00, 0xbe, 0x07, 0xdb, 0x58, 0xb6, 0x94, 0x62, 0xe7, - 0xf6, 0x66, 0x5d, 0xef, 0x64, 0xf1, 0xf4, 0x2f, 0xc7, 0xad, 0x64, 0x54, 0x17, 0x15, 0xf7, 0xad, - 0xd0, 0x59, 0x19, 0x36, 0xb3, 0xb8, 0x02, 0xd8, 0xd3, 0xad, 0x94, 0xca, 0x28, 0xf5, 0x8d, 0xd2, - 0xb3, 0x7f, 0x28, 0x5d, 0x5c, 0x93, 0x6b, 0xa9, 0x1b, 0xd3, 0x38, 0x87, 0x07, 0xec, 0x27, 0x8b, - 0x0b, 0x2d, 0xb3, 0x4b, 0xaa, 0xf7, 0xee, 0xc0, 0x5c, 0x7d, 0x0b, 0xc3, 0x33, 0x18, 0x6d, 0xa9, - 0xa6, 0x51, 0x96, 0xc6, 0x87, 0xdc, 0xec, 0xbc, 0x3f, 0xb3, 0xbc, 0x93, 0xc5, 0x63, 0x52, 0x67, - 0x4c, 0x8e, 0x19, 0x93, 0xb5, 0xc9, 0x38, 0x6c, 0xd1, 0xd1, 0x83, 0x71, 0x83, 0x6c, 0xe4, 0x81, - 0x09, 0x77, 0x68, 0xf6, 0xb4, 0x61, 0x24, 0x80, 0x0d, 0xf4, 0x41, 0xe4, 0x9a, 0x8a, 0x98, 0xb9, - 0xb6, 0x21, 0xdf, 0xd1, 0x99, 0xbc, 0x81, 0xd1, 0x6d, 0x9f, 0xd0, 0x81, 0xee, 0x81, 0x95, 0x26, - 0x7c, 0x3b, 0xac, 0x4a, 0x7c, 0x04, 0xfd, 0x1f, 0x94, 0x17, 0xc7, 0xe4, 0xea, 0xc7, 0xeb, 0xce, - 0x2b, 0x6b, 0x72, 0x0a, 0xe3, 0x96, 0x37, 0xff, 0x33, 0x7e, 0x7e, 0xf6, 0xfd, 0x34, 0x49, 0xf5, - 0xbe, 0x88, 0x48, 0x2c, 0xaf, 0x7c, 0xe3, 0xbf, 0xcc, 0x92, 0xba, 0xf0, 0xaf, 0xbf, 0x4b, 0xc2, - 0x84, 0xaf, 0xa2, 0xe7, 0x89, 0xf4, 0xdb, 0x3f, 0x28, 0x1a, 0x18, 0xe3, 0x5e, 0xfe, 0x0e, 0x00, - 0x00, 0xff, 0xff, 0x07, 0x7f, 0xfa, 0xf2, 0x5c, 0x03, 0x00, 0x00, + // 549 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x51, 0x6f, 0xd2, 0x50, + 0x14, 0x5e, 0xc7, 0xd8, 0xd6, 0xc3, 0x64, 0xcd, 0xd1, 0x65, 0x95, 0xf0, 0x40, 0x78, 0x11, 0x4d, + 
0xbc, 0x35, 0xa8, 0x09, 0x1a, 0x97, 0x05, 0x88, 0x8a, 0x68, 0x26, 0x16, 0xb2, 0x44, 0xdf, 0x6e, + 0xdb, 0x4b, 0x69, 0x28, 0xbd, 0x4d, 0x7b, 0xbb, 0xc8, 0x1f, 0xf0, 0x77, 0x9b, 0xde, 0x4a, 0x0b, + 0x0d, 0x5b, 0xe2, 0xdb, 0x3d, 0xdf, 0xf9, 0xce, 0xf7, 0x9d, 0x9c, 0x73, 0x72, 0xa1, 0x39, 0xf7, + 0xd7, 0x82, 0x79, 0x8e, 0x6f, 0x84, 0x7e, 0xe2, 0x7a, 0x41, 0x6c, 0xc4, 0x21, 0x8d, 0x96, 0x24, + 0x8c, 0xb8, 0xe0, 0xa8, 0x6d, 0xb2, 0xe4, 0x5f, 0xb6, 0xf1, 0x34, 0xe7, 0xdb, 0x3c, 0x62, 0x86, + 0xa0, 0xf1, 0x32, 0xce, 0xc8, 0x8d, 0xa6, 0xcb, 0xb9, 0xeb, 0x33, 0x43, 0x46, 0x56, 0x32, 0x37, + 0x62, 0x11, 0x25, 0xb6, 0xc8, 0xb2, 0xed, 0x01, 0x68, 0xd3, 0x54, 0xb9, 0x1f, 0x86, 0xbe, 0x67, + 0x53, 0xe1, 0xf1, 0xa0, 0x4d, 0xe0, 0x68, 0xb6, 0x0e, 0x19, 0x02, 0x1c, 0x4f, 0x7e, 0xce, 0x46, + 0xdf, 0x6f, 0xb4, 0x03, 0x3c, 0x85, 0xa3, 0x71, 0xff, 0xb6, 0xaf, 0x29, 0xa8, 0x42, 0x75, 0x3a, + 0xec, 0x7f, 0xeb, 0x6b, 0x87, 0x58, 0x05, 0xc5, 0xd4, 0x2a, 0xed, 0x3f, 0x27, 0x70, 0x2a, 0x45, + 0xc6, 0xdc, 0xc2, 0x1f, 0x70, 0x4e, 0x0b, 0xad, 0x54, 0x47, 0x57, 0x5a, 0x4a, 0xa7, 0xde, 0x7d, + 0x46, 0xca, 0x5d, 0x93, 0xb2, 0x33, 0x49, 0xe9, 0x66, 0xb9, 0x1e, 0x5f, 0xc1, 0xe3, 0x15, 0xf5, + 0x82, 0x2d, 0xe2, 0x27, 0xcf, 0x67, 0xfa, 0x61, 0x4b, 0xe9, 0xa8, 0xe6, 0xbe, 0x14, 0x36, 0x41, + 0x4d, 0xe1, 0xa1, 0x4f, 0xe3, 0x58, 0xaf, 0x48, 0x5e, 0x01, 0xe0, 0x67, 0x50, 0xe5, 0x34, 0x87, + 0x3c, 0x98, 0xeb, 0x47, 0xad, 0x4a, 0xa7, 0xd6, 0x7d, 0x7e, 0x4f, 0x73, 0x63, 0x6e, 0x65, 0x8f, + 0x94, 0xfb, 0x31, 0x10, 0xd1, 0xda, 0x2c, 0x6a, 0x71, 0x0c, 0xb0, 0xa0, 0x0e, 0xe7, 0xa1, 0x54, + 0xaa, 0x4a, 0xa5, 0x17, 0x0f, 0x28, 0x8d, 0x72, 0x72, 0x26, 0xb5, 0x55, 0x8d, 0x6d, 0x38, 0x63, + 0xbf, 0x99, 0x9d, 0x08, 0x1e, 0x4d, 0xa8, 0x58, 0xe8, 0xc7, 0xb2, 0xeb, 0x1d, 0x0c, 0xaf, 0xa1, + 0xee, 0x50, 0x41, 0xad, 0xc8, 0xb3, 0x97, 0xb1, 0xf4, 0x3c, 0x69, 0x29, 0x9d, 0x5a, 0xf7, 0x92, + 0x64, 0x3b, 0x26, 0x9b, 0x1d, 0x93, 0xa9, 0xdc, 0xb1, 0x59, 0xa2, 0x63, 0x07, 0xce, 0x0b, 0x64, + 0xc6, 0x97, 0x2c, 0xd0, 0x4f, 0xa5, 0x4f, 0x19, 0x46, 0x02, 0x58, 0x40, 0x5f, 0x82, 0x58, 0xd0, + 0xc0, 0x66, 0xba, 0x2a, 0xc9, 0x7b, 0x32, 0xf8, 0x16, 0x54, 0x27, 0xf2, 0xee, 0x58, 0x34, 0xe1, + 0x8e, 0x0e, 0xb2, 0xab, 0x8b, 0x62, 0x12, 0xe9, 0x51, 0x92, 0xaf, 0xbd, 0x78, 0xc2, 0x9d, 0xd1, + 0x81, 0x59, 0x30, 0xf1, 0x0d, 0x5c, 0xe4, 0xc1, 0x8c, 0xad, 0x42, 0x9f, 0x0a, 0x76, 0x43, 0x57, + 0x4c, 0xaf, 0x49, 0xa7, 0xfd, 0x49, 0x7c, 0x07, 0xb5, 0x7c, 0x2e, 0xdc, 0xd1, 0xcf, 0x1e, 0xb2, + 0x53, 0xcc, 0x6d, 0x2e, 0xf6, 0xe0, 0x72, 0x2b, 0xdc, 0xb1, 0x7c, 0x24, 0x2d, 0xef, 0x4b, 0x37, + 0x3e, 0x40, 0x7d, 0xf7, 0x12, 0x50, 0x83, 0xca, 0x92, 0xad, 0xe5, 0x79, 0xab, 0x66, 0xfa, 0xc4, + 0x27, 0x50, 0xbd, 0xa3, 0x7e, 0xb2, 0xb9, 0xcd, 0x2c, 0x78, 0x7f, 0xd8, 0x53, 0x1a, 0x57, 0x70, + 0x5e, 0xda, 0xfe, 0xff, 0x94, 0x0f, 0x34, 0xa8, 0xe7, 0xa3, 0xb8, 0x4d, 0xd1, 0x01, 0x82, 0xb6, + 0xd5, 0x69, 0x86, 0x5d, 0xff, 0xba, 0x72, 0x3d, 0xb1, 0x48, 0x2c, 0x62, 0xf3, 0x95, 0x21, 0xc7, + 0xc1, 0x23, 0x37, 0x7b, 0x18, 0xf9, 0x0f, 0xe1, 0xb2, 0xc0, 0x08, 0xad, 0x97, 0x2e, 0x37, 0xca, + 0x9f, 0x8c, 0x75, 0x2c, 0x0f, 0xe8, 0xf5, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe3, 0xaa, 0xe1, + 0x4a, 0x7f, 0x04, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.validate.go b/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.validate.go index 0577090e28..1e7d7c5f71 100644 --- a/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.validate.go +++ b/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.validate.go @@ -134,6 +134,42 @@ func (m *SparkJob) Validate() error { // no validation rules for 
DatabricksInstance + // no validation rules for DriverPodTemplateName + + // no validation rules for ExecutorPodTemplateName + + switch m.DriverPodValue.(type) { + + case *SparkJob_DriverPod: + + if v, ok := interface{}(m.GetDriverPod()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return SparkJobValidationError{ + field: "DriverPod", + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + switch m.ExecutorPodValue.(type) { + + case *SparkJob_ExecutorPod: + + if v, ok := interface{}(m.GetExecutorPod()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return SparkJobValidationError{ + field: "ExecutorPod", + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + return nil } diff --git a/flyteidl/gen/pb-java/flyteidl/plugins/Spark.java b/flyteidl/gen/pb-java/flyteidl/plugins/Spark.java index bd236e10eb..e1afaeb7d9 100644 --- a/flyteidl/gen/pb-java/flyteidl/plugins/Spark.java +++ b/flyteidl/gen/pb-java/flyteidl/plugins/Spark.java @@ -727,6 +727,84 @@ java.lang.String getHadoopConfOrThrow( */ com.google.protobuf.ByteString getDatabricksInstanceBytes(); + + /** + * .flyteidl.core.K8sPod driverPod = 10; + */ + boolean hasDriverPod(); + /** + * .flyteidl.core.K8sPod driverPod = 10; + */ + flyteidl.core.Tasks.K8sPod getDriverPod(); + /** + * .flyteidl.core.K8sPod driverPod = 10; + */ + flyteidl.core.Tasks.K8sPodOrBuilder getDriverPodOrBuilder(); + + /** + *
+     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+     * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+     * identically as, the default PodTemplate configured in FlytePropeller.
+     * +optional
+     * </pre>
+     *
+     * <code>string driverPodTemplateName = 11;</code>
+     */
+    java.lang.String getDriverPodTemplateName();
+    /**
+     * <pre>
+     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+     * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+     * identically as, the default PodTemplate configured in FlytePropeller.
+     * +optional
+     * </pre>
+     *
+     * <code>string driverPodTemplateName = 11;</code>
+     */
+    com.google.protobuf.ByteString
+        getDriverPodTemplateNameBytes();
+
+    /**
+     * <code>.flyteidl.core.K8sPod executorPod = 12;</code>
+     */
+    boolean hasExecutorPod();
+    /**
+     * <code>.flyteidl.core.K8sPod executorPod = 12;</code>
+     */
+    flyteidl.core.Tasks.K8sPod getExecutorPod();
+    /**
+     * <code>.flyteidl.core.K8sPod executorPod = 12;</code>
+     */
+    flyteidl.core.Tasks.K8sPodOrBuilder getExecutorPodOrBuilder();
+
+    /**
+     * <pre>
+     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+     * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+     * identically as, the default PodTemplate configured in FlytePropeller.
+     * +optional
+     * </pre>
+     *
+     * <code>string executorPodTemplateName = 13;</code>
+     */
+    java.lang.String getExecutorPodTemplateName();
+    /**
+     * <pre>
+     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+     * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+     * identically as, the default PodTemplate configured in FlytePropeller.
+     * +optional
+     * 
+ * + * string executorPodTemplateName = 13; + */ + com.google.protobuf.ByteString + getExecutorPodTemplateNameBytes(); + + public flyteidl.plugins.Spark.SparkJob.DriverPodValueCase getDriverPodValueCase(); + + public flyteidl.plugins.Spark.SparkJob.ExecutorPodValueCase getExecutorPodValueCase(); } /** *
@@ -751,6 +829,8 @@ private SparkJob() {
       executorPath_ = "";
       databricksToken_ = "";
       databricksInstance_ = "";
+      driverPodTemplateName_ = "";
+      executorPodTemplateName_ = "";
     }
 
     @java.lang.Override
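Editor's note on the hunk above: the two new string fields initialized in the constructor behave like every other proto3 string in this message, defaulting to the empty string and contributing nothing to the wire encoding until set. A minimal sketch of that behavior follows; it is not part of the patch, the class name is invented, and it only assumes the generated flyteidl Java bindings are on the classpath.

    // DefaultsCheck.java -- illustrative only, not part of the generated sources.
    import flyteidl.plugins.Spark.SparkJob;

    final class DefaultsCheck {
      public static void main(String[] args) {
        SparkJob empty = SparkJob.getDefaultInstance();
        // Unset template names read back as "" and are skipped during serialization.
        System.out.println(empty.getDriverPodTemplateName().isEmpty());   // true
        System.out.println(empty.getExecutorPodTemplateName().isEmpty()); // true
        System.out.println(empty.getSerializedSize());                    // 0
      }
    }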
@@ -852,6 +932,46 @@ private SparkJob(
               databricksInstance_ = s;
               break;
             }
+            case 82: {
+              flyteidl.core.Tasks.K8sPod.Builder subBuilder = null;
+              if (driverPodValueCase_ == 10) {
+                subBuilder = ((flyteidl.core.Tasks.K8sPod) driverPodValue_).toBuilder();
+              }
+              driverPodValue_ =
+                  input.readMessage(flyteidl.core.Tasks.K8sPod.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom((flyteidl.core.Tasks.K8sPod) driverPodValue_);
+                driverPodValue_ = subBuilder.buildPartial();
+              }
+              driverPodValueCase_ = 10;
+              break;
+            }
+            case 90: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              driverPodTemplateName_ = s;
+              break;
+            }
+            case 98: {
+              flyteidl.core.Tasks.K8sPod.Builder subBuilder = null;
+              if (executorPodValueCase_ == 12) {
+                subBuilder = ((flyteidl.core.Tasks.K8sPod) executorPodValue_).toBuilder();
+              }
+              executorPodValue_ =
+                  input.readMessage(flyteidl.core.Tasks.K8sPod.parser(), extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom((flyteidl.core.Tasks.K8sPod) executorPodValue_);
+                executorPodValue_ = subBuilder.buildPartial();
+              }
+              executorPodValueCase_ = 12;
+              break;
+            }
+            case 106: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              executorPodTemplateName_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
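Editor's note for readers tracing the parser hunk above: the new case labels are the proto wire tags for fields 10 through 13, computed as (field number << 3) | wire type, where wire type 2 (length-delimited) covers both embedded messages and strings. The arithmetic is sketched below; the class is illustrative and not part of the generated file.

    // TagMath.java -- shows how the case labels 82, 90, 98, and 106 follow
    // from the new field numbers 10-13; not part of the patch.
    public final class TagMath {
      private static final int LENGTH_DELIMITED = 2; // wire type for messages and strings

      static int tag(int fieldNumber) {
        return (fieldNumber << 3) | LENGTH_DELIMITED;
      }

      public static void main(String[] args) {
        System.out.println(tag(10)); // 82  -> driverPod
        System.out.println(tag(11)); // 90  -> driverPodTemplateName
        System.out.println(tag(12)); // 98  -> executorPod
        System.out.println(tag(13)); // 106 -> executorPodTemplateName
      }
    }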
@@ -899,6 +1019,78 @@ protected com.google.protobuf.MapField internalGetMapField(
     }
 
     private int bitField0_;
+    private int driverPodValueCase_ = 0;
+    private java.lang.Object driverPodValue_;
+    public enum DriverPodValueCase
+        implements com.google.protobuf.Internal.EnumLite {
+      DRIVERPOD(10),
+      DRIVERPODVALUE_NOT_SET(0);
+      private final int value;
+      private DriverPodValueCase(int value) {
+        this.value = value;
+      }
+      /**
+       * @deprecated Use {@link #forNumber(int)} instead.
+       */
+      @java.lang.Deprecated
+      public static DriverPodValueCase valueOf(int value) {
+        return forNumber(value);
+      }
+
+      public static DriverPodValueCase forNumber(int value) {
+        switch (value) {
+          case 10: return DRIVERPOD;
+          case 0: return DRIVERPODVALUE_NOT_SET;
+          default: return null;
+        }
+      }
+      public int getNumber() {
+        return this.value;
+      }
+    };
+
+    public DriverPodValueCase
+    getDriverPodValueCase() {
+      return DriverPodValueCase.forNumber(
+          driverPodValueCase_);
+    }
+
+    private int executorPodValueCase_ = 0;
+    private java.lang.Object executorPodValue_;
+    public enum ExecutorPodValueCase
+        implements com.google.protobuf.Internal.EnumLite {
+      EXECUTORPOD(12),
+      EXECUTORPODVALUE_NOT_SET(0);
+      private final int value;
+      private ExecutorPodValueCase(int value) {
+        this.value = value;
+      }
+      /**
+       * @deprecated Use {@link #forNumber(int)} instead.
+       */
+      @java.lang.Deprecated
+      public static ExecutorPodValueCase valueOf(int value) {
+        return forNumber(value);
+      }
+
+      public static ExecutorPodValueCase forNumber(int value) {
+        switch (value) {
+          case 12: return EXECUTORPOD;
+          case 0: return EXECUTORPODVALUE_NOT_SET;
+          default: return null;
+        }
+      }
+      public int getNumber() {
+        return this.value;
+      }
+    };
+
+    public ExecutorPodValueCase
+    getExecutorPodValueCase() {
+      return ExecutorPodValueCase.forNumber(
+          executorPodValueCase_);
+    }
+
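Editor's note: the generated DriverPodValueCase and ExecutorPodValueCase enums above are how consuming code distinguishes an inline K8sPod from the not-set case. A minimal reader-side sketch follows, assuming only the generated flyteidl Java bindings; the helper class and method names are invented for illustration.

    // OneofReadExample.java -- illustrative only, not part of the generated sources.
    import flyteidl.core.Tasks.K8sPod;
    import flyteidl.plugins.Spark.SparkJob;

    final class OneofReadExample {
      static K8sPod driverPodOrDefault(SparkJob job) {
        // hasDriverPod() below is shorthand for this same case check.
        if (job.getDriverPodValueCase() == SparkJob.DriverPodValueCase.DRIVERPOD) {
          return job.getDriverPod();
        }
        return K8sPod.getDefaultInstance();
      }

      static K8sPod executorPodOrDefault(SparkJob job) {
        return job.hasExecutorPod() ? job.getExecutorPod() : K8sPod.getDefaultInstance();
      }
    }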
     public static final int APPLICATIONTYPE_FIELD_NUMBER = 1;
     private int applicationType_;
     /**
@@ -1302,6 +1494,154 @@ public java.lang.String getDatabricksInstance() {
       }
     }
 
+    public static final int DRIVERPOD_FIELD_NUMBER = 10;
+    /**
+     * .flyteidl.core.K8sPod driverPod = 10;
+     */
+    public boolean hasDriverPod() {
+      return driverPodValueCase_ == 10;
+    }
+    /**
+     * .flyteidl.core.K8sPod driverPod = 10;
+     */
+    public flyteidl.core.Tasks.K8sPod getDriverPod() {
+      if (driverPodValueCase_ == 10) {
+         return (flyteidl.core.Tasks.K8sPod) driverPodValue_;
+      }
+      return flyteidl.core.Tasks.K8sPod.getDefaultInstance();
+    }
+    /**
+     * .flyteidl.core.K8sPod driverPod = 10;
+     */
+    public flyteidl.core.Tasks.K8sPodOrBuilder getDriverPodOrBuilder() {
+      if (driverPodValueCase_ == 10) {
+         return (flyteidl.core.Tasks.K8sPod) driverPodValue_;
+      }
+      return flyteidl.core.Tasks.K8sPod.getDefaultInstance();
+    }
+
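Editor's note on the accessors that follow: a caller can supply either an inline pod spec (the oneof) or a PodTemplate name per role, and the two are independent fields. A hedged writer-side sketch is below; the class name, file path, and template name are arbitrary placeholders, and only generated methods shown elsewhere in this patch are used.

    // BuildExample.java -- illustrative only; values are placeholders.
    import flyteidl.core.Tasks.K8sPod;
    import flyteidl.plugins.Spark.SparkJob;

    final class BuildExample {
      public static void main(String[] args) {
        SparkJob job = SparkJob.newBuilder()
            .setMainApplicationFile("local:///app/main.py")
            .setDriverPod(K8sPod.getDefaultInstance())             // inline driver pod spec
            .setExecutorPodTemplateName("spark-executor-template") // executor referenced by template name
            .build();
        System.out.println(job.hasDriverPod());   // true
        System.out.println(job.hasExecutorPod()); // false
      }
    }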
+    public static final int DRIVERPODTEMPLATENAME_FIELD_NUMBER = 11;
+    private volatile java.lang.Object driverPodTemplateName_;
+    /**
+     * 
+     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+     * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+     * identically as, the default PodTemplate configured in FlytePropeller.
+     * +optional
+     * 
+ * + * string driverPodTemplateName = 11; + */ + public java.lang.String getDriverPodTemplateName() { + java.lang.Object ref = driverPodTemplateName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + driverPodTemplateName_ = s; + return s; + } + } + /** + *
+     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+     * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+     * identically as, the default PodTemplate configured in FlytePropeller.
+     * +optional
+     * 
+ * + * string driverPodTemplateName = 11; + */ + public com.google.protobuf.ByteString + getDriverPodTemplateNameBytes() { + java.lang.Object ref = driverPodTemplateName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + driverPodTemplateName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int EXECUTORPOD_FIELD_NUMBER = 12; + /** + * .flyteidl.core.K8sPod executorPod = 12; + */ + public boolean hasExecutorPod() { + return executorPodValueCase_ == 12; + } + /** + * .flyteidl.core.K8sPod executorPod = 12; + */ + public flyteidl.core.Tasks.K8sPod getExecutorPod() { + if (executorPodValueCase_ == 12) { + return (flyteidl.core.Tasks.K8sPod) executorPodValue_; + } + return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } + /** + * .flyteidl.core.K8sPod executorPod = 12; + */ + public flyteidl.core.Tasks.K8sPodOrBuilder getExecutorPodOrBuilder() { + if (executorPodValueCase_ == 12) { + return (flyteidl.core.Tasks.K8sPod) executorPodValue_; + } + return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } + + public static final int EXECUTORPODTEMPLATENAME_FIELD_NUMBER = 13; + private volatile java.lang.Object executorPodTemplateName_; + /** + *
+     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+     * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+     * identically as, the default PodTemplate configured in FlytePropeller.
+     * +optional
+     * 
+ * + * string executorPodTemplateName = 13; + */ + public java.lang.String getExecutorPodTemplateName() { + java.lang.Object ref = executorPodTemplateName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + executorPodTemplateName_ = s; + return s; + } + } + /** + *
+     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+     * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+     * identically as, the default PodTemplate configured in FlytePropeller.
+     * +optional
+     * 
+ * + * string executorPodTemplateName = 13; + */ + public com.google.protobuf.ByteString + getExecutorPodTemplateNameBytes() { + java.lang.Object ref = executorPodTemplateName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + executorPodTemplateName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -1349,6 +1689,18 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getDatabricksInstanceBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 9, databricksInstance_); } + if (driverPodValueCase_ == 10) { + output.writeMessage(10, (flyteidl.core.Tasks.K8sPod) driverPodValue_); + } + if (!getDriverPodTemplateNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 11, driverPodTemplateName_); + } + if (executorPodValueCase_ == 12) { + output.writeMessage(12, (flyteidl.core.Tasks.K8sPod) executorPodValue_); + } + if (!getExecutorPodTemplateNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 13, executorPodTemplateName_); + } unknownFields.writeTo(output); } @@ -1401,6 +1753,20 @@ public int getSerializedSize() { if (!getDatabricksInstanceBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, databricksInstance_); } + if (driverPodValueCase_ == 10) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(10, (flyteidl.core.Tasks.K8sPod) driverPodValue_); + } + if (!getDriverPodTemplateNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(11, driverPodTemplateName_); + } + if (executorPodValueCase_ == 12) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(12, (flyteidl.core.Tasks.K8sPod) executorPodValue_); + } + if (!getExecutorPodTemplateNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(13, executorPodTemplateName_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -1436,6 +1802,28 @@ public boolean equals(final java.lang.Object obj) { .equals(other.getDatabricksToken())) return false; if (!getDatabricksInstance() .equals(other.getDatabricksInstance())) return false; + if (!getDriverPodTemplateName() + .equals(other.getDriverPodTemplateName())) return false; + if (!getExecutorPodTemplateName() + .equals(other.getExecutorPodTemplateName())) return false; + if (!getDriverPodValueCase().equals(other.getDriverPodValueCase())) return false; + switch (driverPodValueCase_) { + case 10: + if (!getDriverPod() + .equals(other.getDriverPod())) return false; + break; + case 0: + default: + } + if (!getExecutorPodValueCase().equals(other.getExecutorPodValueCase())) return false; + switch (executorPodValueCase_) { + case 12: + if (!getExecutorPod() + .equals(other.getExecutorPod())) return false; + break; + case 0: + default: + } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -1471,6 +1859,26 @@ public int hashCode() { hash = (53 * hash) + getDatabricksToken().hashCode(); hash = (37 * hash) + DATABRICKSINSTANCE_FIELD_NUMBER; hash = (53 * hash) + getDatabricksInstance().hashCode(); + hash = (37 * hash) + DRIVERPODTEMPLATENAME_FIELD_NUMBER; + hash = (53 * hash) + getDriverPodTemplateName().hashCode(); + hash = (37 * hash) + 
EXECUTORPODTEMPLATENAME_FIELD_NUMBER; + hash = (53 * hash) + getExecutorPodTemplateName().hashCode(); + switch (driverPodValueCase_) { + case 10: + hash = (37 * hash) + DRIVERPOD_FIELD_NUMBER; + hash = (53 * hash) + getDriverPod().hashCode(); + break; + case 0: + default: + } + switch (executorPodValueCase_) { + case 12: + hash = (37 * hash) + EXECUTORPOD_FIELD_NUMBER; + hash = (53 * hash) + getExecutorPod().hashCode(); + break; + case 0: + default: + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -1654,6 +2062,14 @@ public Builder clear() { databricksInstance_ = ""; + driverPodTemplateName_ = ""; + + executorPodTemplateName_ = ""; + + driverPodValueCase_ = 0; + driverPodValue_ = null; + executorPodValueCase_ = 0; + executorPodValue_ = null; return this; } @@ -1697,7 +2113,25 @@ public flyteidl.plugins.Spark.SparkJob buildPartial() { } result.databricksToken_ = databricksToken_; result.databricksInstance_ = databricksInstance_; + if (driverPodValueCase_ == 10) { + if (driverPodBuilder_ == null) { + result.driverPodValue_ = driverPodValue_; + } else { + result.driverPodValue_ = driverPodBuilder_.build(); + } + } + result.driverPodTemplateName_ = driverPodTemplateName_; + if (executorPodValueCase_ == 12) { + if (executorPodBuilder_ == null) { + result.executorPodValue_ = executorPodValue_; + } else { + result.executorPodValue_ = executorPodBuilder_.build(); + } + } + result.executorPodTemplateName_ = executorPodTemplateName_; result.bitField0_ = to_bitField0_; + result.driverPodValueCase_ = driverPodValueCase_; + result.executorPodValueCase_ = executorPodValueCase_; onBuilt(); return result; } @@ -1776,6 +2210,32 @@ public Builder mergeFrom(flyteidl.plugins.Spark.SparkJob other) { databricksInstance_ = other.databricksInstance_; onChanged(); } + if (!other.getDriverPodTemplateName().isEmpty()) { + driverPodTemplateName_ = other.driverPodTemplateName_; + onChanged(); + } + if (!other.getExecutorPodTemplateName().isEmpty()) { + executorPodTemplateName_ = other.executorPodTemplateName_; + onChanged(); + } + switch (other.getDriverPodValueCase()) { + case DRIVERPOD: { + mergeDriverPod(other.getDriverPod()); + break; + } + case DRIVERPODVALUE_NOT_SET: { + break; + } + } + switch (other.getExecutorPodValueCase()) { + case EXECUTORPOD: { + mergeExecutorPod(other.getExecutorPod()); + break; + } + case EXECUTORPODVALUE_NOT_SET: { + break; + } + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -1804,6 +2264,36 @@ public Builder mergeFrom( } return this; } + private int driverPodValueCase_ = 0; + private java.lang.Object driverPodValue_; + public DriverPodValueCase + getDriverPodValueCase() { + return DriverPodValueCase.forNumber( + driverPodValueCase_); + } + + public Builder clearDriverPodValue() { + driverPodValueCase_ = 0; + driverPodValue_ = null; + onChanged(); + return this; + } + + private int executorPodValueCase_ = 0; + private java.lang.Object executorPodValue_; + public ExecutorPodValueCase + getExecutorPodValueCase() { + return ExecutorPodValueCase.forNumber( + executorPodValueCase_); + } + + public Builder clearExecutorPodValue() { + executorPodValueCase_ = 0; + executorPodValue_ = null; + onChanged(); + return this; + } + private int bitField0_; private int applicationType_ = 0; @@ -2673,6 +3163,486 @@ public Builder setDatabricksInstanceBytes( onChanged(); return this; } + + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, 
flyteidl.core.Tasks.K8sPodOrBuilder> driverPodBuilder_; + /** + * .flyteidl.core.K8sPod driverPod = 10; + */ + public boolean hasDriverPod() { + return driverPodValueCase_ == 10; + } + /** + * .flyteidl.core.K8sPod driverPod = 10; + */ + public flyteidl.core.Tasks.K8sPod getDriverPod() { + if (driverPodBuilder_ == null) { + if (driverPodValueCase_ == 10) { + return (flyteidl.core.Tasks.K8sPod) driverPodValue_; + } + return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } else { + if (driverPodValueCase_ == 10) { + return driverPodBuilder_.getMessage(); + } + return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } + } + /** + * .flyteidl.core.K8sPod driverPod = 10; + */ + public Builder setDriverPod(flyteidl.core.Tasks.K8sPod value) { + if (driverPodBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + driverPodValue_ = value; + onChanged(); + } else { + driverPodBuilder_.setMessage(value); + } + driverPodValueCase_ = 10; + return this; + } + /** + * .flyteidl.core.K8sPod driverPod = 10; + */ + public Builder setDriverPod( + flyteidl.core.Tasks.K8sPod.Builder builderForValue) { + if (driverPodBuilder_ == null) { + driverPodValue_ = builderForValue.build(); + onChanged(); + } else { + driverPodBuilder_.setMessage(builderForValue.build()); + } + driverPodValueCase_ = 10; + return this; + } + /** + * .flyteidl.core.K8sPod driverPod = 10; + */ + public Builder mergeDriverPod(flyteidl.core.Tasks.K8sPod value) { + if (driverPodBuilder_ == null) { + if (driverPodValueCase_ == 10 && + driverPodValue_ != flyteidl.core.Tasks.K8sPod.getDefaultInstance()) { + driverPodValue_ = flyteidl.core.Tasks.K8sPod.newBuilder((flyteidl.core.Tasks.K8sPod) driverPodValue_) + .mergeFrom(value).buildPartial(); + } else { + driverPodValue_ = value; + } + onChanged(); + } else { + if (driverPodValueCase_ == 10) { + driverPodBuilder_.mergeFrom(value); + } + driverPodBuilder_.setMessage(value); + } + driverPodValueCase_ = 10; + return this; + } + /** + * .flyteidl.core.K8sPod driverPod = 10; + */ + public Builder clearDriverPod() { + if (driverPodBuilder_ == null) { + if (driverPodValueCase_ == 10) { + driverPodValueCase_ = 0; + driverPodValue_ = null; + onChanged(); + } + } else { + if (driverPodValueCase_ == 10) { + driverPodValueCase_ = 0; + driverPodValue_ = null; + } + driverPodBuilder_.clear(); + } + return this; + } + /** + * .flyteidl.core.K8sPod driverPod = 10; + */ + public flyteidl.core.Tasks.K8sPod.Builder getDriverPodBuilder() { + return getDriverPodFieldBuilder().getBuilder(); + } + /** + * .flyteidl.core.K8sPod driverPod = 10; + */ + public flyteidl.core.Tasks.K8sPodOrBuilder getDriverPodOrBuilder() { + if ((driverPodValueCase_ == 10) && (driverPodBuilder_ != null)) { + return driverPodBuilder_.getMessageOrBuilder(); + } else { + if (driverPodValueCase_ == 10) { + return (flyteidl.core.Tasks.K8sPod) driverPodValue_; + } + return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } + } + /** + * .flyteidl.core.K8sPod driverPod = 10; + */ + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder> + getDriverPodFieldBuilder() { + if (driverPodBuilder_ == null) { + if (!(driverPodValueCase_ == 10)) { + driverPodValue_ = flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } + driverPodBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder>( + (flyteidl.core.Tasks.K8sPod) 
driverPodValue_, + getParentForChildren(), + isClean()); + driverPodValue_ = null; + } + driverPodValueCase_ = 10; + onChanged();; + return driverPodBuilder_; + } + + private java.lang.Object driverPodTemplateName_ = ""; + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+       * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+       * identically as, the default PodTemplate configured in FlytePropeller.
+       * +optional
+       * 
+ * + * string driverPodTemplateName = 11; + */ + public java.lang.String getDriverPodTemplateName() { + java.lang.Object ref = driverPodTemplateName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + driverPodTemplateName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+       * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+       * identically as, the default PodTemplate configured in FlytePropeller.
+       * +optional
+       * 
+ * + * string driverPodTemplateName = 11; + */ + public com.google.protobuf.ByteString + getDriverPodTemplateNameBytes() { + java.lang.Object ref = driverPodTemplateName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + driverPodTemplateName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+       * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+       * identically as, the default PodTemplate configured in FlytePropeller.
+       * +optional
+       * 
+ * + * string driverPodTemplateName = 11; + */ + public Builder setDriverPodTemplateName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + driverPodTemplateName_ = value; + onChanged(); + return this; + } + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+       * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+       * identically as, the default PodTemplate configured in FlytePropeller.
+       * +optional
+       * 
+ * + * string driverPodTemplateName = 11; + */ + public Builder clearDriverPodTemplateName() { + + driverPodTemplateName_ = getDefaultInstance().getDriverPodTemplateName(); + onChanged(); + return this; + } + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+       * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+       * identically as, the default PodTemplate configured in FlytePropeller.
+       * +optional
+       * 
+ * + * string driverPodTemplateName = 11; + */ + public Builder setDriverPodTemplateNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + driverPodTemplateName_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder> executorPodBuilder_; + /** + * .flyteidl.core.K8sPod executorPod = 12; + */ + public boolean hasExecutorPod() { + return executorPodValueCase_ == 12; + } + /** + * .flyteidl.core.K8sPod executorPod = 12; + */ + public flyteidl.core.Tasks.K8sPod getExecutorPod() { + if (executorPodBuilder_ == null) { + if (executorPodValueCase_ == 12) { + return (flyteidl.core.Tasks.K8sPod) executorPodValue_; + } + return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } else { + if (executorPodValueCase_ == 12) { + return executorPodBuilder_.getMessage(); + } + return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } + } + /** + * .flyteidl.core.K8sPod executorPod = 12; + */ + public Builder setExecutorPod(flyteidl.core.Tasks.K8sPod value) { + if (executorPodBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + executorPodValue_ = value; + onChanged(); + } else { + executorPodBuilder_.setMessage(value); + } + executorPodValueCase_ = 12; + return this; + } + /** + * .flyteidl.core.K8sPod executorPod = 12; + */ + public Builder setExecutorPod( + flyteidl.core.Tasks.K8sPod.Builder builderForValue) { + if (executorPodBuilder_ == null) { + executorPodValue_ = builderForValue.build(); + onChanged(); + } else { + executorPodBuilder_.setMessage(builderForValue.build()); + } + executorPodValueCase_ = 12; + return this; + } + /** + * .flyteidl.core.K8sPod executorPod = 12; + */ + public Builder mergeExecutorPod(flyteidl.core.Tasks.K8sPod value) { + if (executorPodBuilder_ == null) { + if (executorPodValueCase_ == 12 && + executorPodValue_ != flyteidl.core.Tasks.K8sPod.getDefaultInstance()) { + executorPodValue_ = flyteidl.core.Tasks.K8sPod.newBuilder((flyteidl.core.Tasks.K8sPod) executorPodValue_) + .mergeFrom(value).buildPartial(); + } else { + executorPodValue_ = value; + } + onChanged(); + } else { + if (executorPodValueCase_ == 12) { + executorPodBuilder_.mergeFrom(value); + } + executorPodBuilder_.setMessage(value); + } + executorPodValueCase_ = 12; + return this; + } + /** + * .flyteidl.core.K8sPod executorPod = 12; + */ + public Builder clearExecutorPod() { + if (executorPodBuilder_ == null) { + if (executorPodValueCase_ == 12) { + executorPodValueCase_ = 0; + executorPodValue_ = null; + onChanged(); + } + } else { + if (executorPodValueCase_ == 12) { + executorPodValueCase_ = 0; + executorPodValue_ = null; + } + executorPodBuilder_.clear(); + } + return this; + } + /** + * .flyteidl.core.K8sPod executorPod = 12; + */ + public flyteidl.core.Tasks.K8sPod.Builder getExecutorPodBuilder() { + return getExecutorPodFieldBuilder().getBuilder(); + } + /** + * .flyteidl.core.K8sPod executorPod = 12; + */ + public flyteidl.core.Tasks.K8sPodOrBuilder getExecutorPodOrBuilder() { + if ((executorPodValueCase_ == 12) && (executorPodBuilder_ != null)) { + return executorPodBuilder_.getMessageOrBuilder(); + } else { + if (executorPodValueCase_ == 12) { + return (flyteidl.core.Tasks.K8sPod) executorPodValue_; + } + return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } + } + /** + * .flyteidl.core.K8sPod executorPod = 12; + */ + 
private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder> + getExecutorPodFieldBuilder() { + if (executorPodBuilder_ == null) { + if (!(executorPodValueCase_ == 12)) { + executorPodValue_ = flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } + executorPodBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder>( + (flyteidl.core.Tasks.K8sPod) executorPodValue_, + getParentForChildren(), + isClean()); + executorPodValue_ = null; + } + executorPodValueCase_ = 12; + onChanged();; + return executorPodBuilder_; + } + + private java.lang.Object executorPodTemplateName_ = ""; + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+       * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+       * identically as, the default PodTemplate configured in FlytePropeller.
+       * +optional
+       * 
+ * + * string executorPodTemplateName = 13; + */ + public java.lang.String getExecutorPodTemplateName() { + java.lang.Object ref = executorPodTemplateName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + executorPodTemplateName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+       * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+       * identically as, the default PodTemplate configured in FlytePropeller.
+       * +optional
+       * 
+ * + * string executorPodTemplateName = 13; + */ + public com.google.protobuf.ByteString + getExecutorPodTemplateNameBytes() { + java.lang.Object ref = executorPodTemplateName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + executorPodTemplateName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+       * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+       * identically as, the default PodTemplate configured in FlytePropeller.
+       * +optional
+       * 
+ * + * string executorPodTemplateName = 13; + */ + public Builder setExecutorPodTemplateName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + executorPodTemplateName_ = value; + onChanged(); + return this; + } + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+       * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+       * identically as, the default PodTemplate configured in FlytePropeller.
+       * +optional
+       * 
+ * + * string executorPodTemplateName = 13; + */ + public Builder clearExecutorPodTemplateName() { + + executorPodTemplateName_ = getDefaultInstance().getExecutorPodTemplateName(); + onChanged(); + return this; + } + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
+       * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
+       * identically as, the default PodTemplate configured in FlytePropeller.
+       * +optional
+       * 
+ * + * string executorPodTemplateName = 13; + */ + public Builder setExecutorPodTemplateNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + executorPodTemplateName_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -2756,23 +3726,28 @@ public flyteidl.plugins.Spark.SparkJob getDefaultInstanceForType() { static { java.lang.String[] descriptorData = { "\n\034flyteidl/plugins/spark.proto\022\020flyteidl" + - ".plugins\032\034google/protobuf/struct.proto\"B" + - "\n\020SparkApplication\".\n\004Type\022\n\n\006PYTHON\020\000\022\010" + - "\n\004JAVA\020\001\022\t\n\005SCALA\020\002\022\005\n\001R\020\003\"\333\003\n\010SparkJob\022" + - "@\n\017applicationType\030\001 \001(\0162\'.flyteidl.plug" + - "ins.SparkApplication.Type\022\033\n\023mainApplica" + - "tionFile\030\002 \001(\t\022\021\n\tmainClass\030\003 \001(\t\022<\n\tspa" + - "rkConf\030\004 \003(\0132).flyteidl.plugins.SparkJob" + - ".SparkConfEntry\022>\n\nhadoopConf\030\005 \003(\0132*.fl" + - "yteidl.plugins.SparkJob.HadoopConfEntry\022" + - "\024\n\014executorPath\030\006 \001(\t\022/\n\016databricksConf\030" + - "\007 \001(\0132\027.google.protobuf.Struct\022\027\n\017databr" + - "icksToken\030\010 \001(\t\022\032\n\022databricksInstance\030\t " + - "\001(\t\0320\n\016SparkConfEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005va" + - "lue\030\002 \001(\t:\0028\001\0321\n\017HadoopConfEntry\022\013\n\003key\030" + - "\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B?Z=github.com/f" + - "lyteorg/flyte/flyteidl/gen/pb-go/flyteid" + - "l/pluginsb\006proto3" + ".plugins\032\031flyteidl/core/tasks.proto\032\034goo" + + "gle/protobuf/struct.proto\"B\n\020SparkApplic" + + "ation\".\n\004Type\022\n\n\006PYTHON\020\000\022\010\n\004JAVA\020\001\022\t\n\005S" + + "CALA\020\002\022\005\n\001R\020\003\"\233\005\n\010SparkJob\022@\n\017applicatio" + + "nType\030\001 \001(\0162\'.flyteidl.plugins.SparkAppl" + + "ication.Type\022\033\n\023mainApplicationFile\030\002 \001(" + + "\t\022\021\n\tmainClass\030\003 \001(\t\022<\n\tsparkConf\030\004 \003(\0132" + + ").flyteidl.plugins.SparkJob.SparkConfEnt" + + "ry\022>\n\nhadoopConf\030\005 \003(\0132*.flyteidl.plugin" + + "s.SparkJob.HadoopConfEntry\022\024\n\014executorPa" + + "th\030\006 \001(\t\022/\n\016databricksConf\030\007 \001(\0132\027.googl" + + "e.protobuf.Struct\022\027\n\017databricksToken\030\010 \001" + + "(\t\022\032\n\022databricksInstance\030\t \001(\t\022*\n\tdriver" + + "Pod\030\n \001(\0132\025.flyteidl.core.K8sPodH\000\022\035\n\025dr" + + "iverPodTemplateName\030\013 \001(\t\022,\n\013executorPod" + + "\030\014 \001(\0132\025.flyteidl.core.K8sPodH\001\022\037\n\027execu" + + "torPodTemplateName\030\r \001(\t\0320\n\016SparkConfEnt" + + "ry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\0321\n\017Ha" + + "doopConfEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(" + + "\t:\0028\001B\020\n\016driverPodValueB\022\n\020executorPodVa" + + "lueB?Z=github.com/flyteorg/flyte/flyteid" + + "l/gen/pb-go/flyteidl/pluginsb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { @@ -2785,6 +3760,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { + flyteidl.core.Tasks.getDescriptor(), com.google.protobuf.StructProto.getDescriptor(), }, assigner); internal_static_flyteidl_plugins_SparkApplication_descriptor = @@ -2798,7 +3774,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_plugins_SparkJob_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_plugins_SparkJob_descriptor, - new java.lang.String[] { "ApplicationType", "MainApplicationFile", "MainClass", "SparkConf", "HadoopConf", "ExecutorPath", "DatabricksConf", "DatabricksToken", "DatabricksInstance", }); + new java.lang.String[] { "ApplicationType", "MainApplicationFile", "MainClass", "SparkConf", "HadoopConf", "ExecutorPath", "DatabricksConf", "DatabricksToken", "DatabricksInstance", "DriverPod", "DriverPodTemplateName", "ExecutorPod", "ExecutorPodTemplateName", "DriverPodValue", "ExecutorPodValue", }); internal_static_flyteidl_plugins_SparkJob_SparkConfEntry_descriptor = internal_static_flyteidl_plugins_SparkJob_descriptor.getNestedTypes().get(0); internal_static_flyteidl_plugins_SparkJob_SparkConfEntry_fieldAccessorTable = new @@ -2811,6 +3787,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_plugins_SparkJob_HadoopConfEntry_descriptor, new java.lang.String[] { "Key", "Value", }); + flyteidl.core.Tasks.getDescriptor(); com.google.protobuf.StructProto.getDescriptor(); } diff --git a/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.py b/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.py index 8ee1759390..933435f09c 100644 --- a/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.py @@ -11,10 +11,11 @@ _sym_db = _symbol_database.Default() +from flyteidl.core import tasks_pb2 as flyteidl_dot_core_dot_tasks__pb2 from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/plugins/spark.proto\x12\x10\x66lyteidl.plugins\x1a\x1cgoogle/protobuf/struct.proto\"B\n\x10SparkApplication\".\n\x04Type\x12\n\n\x06PYTHON\x10\x00\x12\x08\n\x04JAVA\x10\x01\x12\t\n\x05SCALA\x10\x02\x12\x05\n\x01R\x10\x03\"\xfe\x04\n\x08SparkJob\x12Q\n\x0f\x61pplicationType\x18\x01 \x01(\x0e\x32\'.flyteidl.plugins.SparkApplication.TypeR\x0f\x61pplicationType\x12\x30\n\x13mainApplicationFile\x18\x02 \x01(\tR\x13mainApplicationFile\x12\x1c\n\tmainClass\x18\x03 \x01(\tR\tmainClass\x12G\n\tsparkConf\x18\x04 \x03(\x0b\x32).flyteidl.plugins.SparkJob.SparkConfEntryR\tsparkConf\x12J\n\nhadoopConf\x18\x05 \x03(\x0b\x32*.flyteidl.plugins.SparkJob.HadoopConfEntryR\nhadoopConf\x12\"\n\x0c\x65xecutorPath\x18\x06 \x01(\tR\x0c\x65xecutorPath\x12?\n\x0e\x64\x61tabricksConf\x18\x07 \x01(\x0b\x32\x17.google.protobuf.StructR\x0e\x64\x61tabricksConf\x12(\n\x0f\x64\x61tabricksToken\x18\x08 \x01(\tR\x0f\x64\x61tabricksToken\x12.\n\x12\x64\x61tabricksInstance\x18\t \x01(\tR\x12\x64\x61tabricksInstance\x1a<\n\x0eSparkConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a=\n\x0fHadoopConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 
\x01(\tR\x05value:\x02\x38\x01\x42\xc2\x01\n\x14\x63om.flyteidl.pluginsB\nSparkProtoP\x01Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins\xa2\x02\x03\x46PX\xaa\x02\x10\x46lyteidl.Plugins\xca\x02\x10\x46lyteidl\\Plugins\xe2\x02\x1c\x46lyteidl\\Plugins\\GPBMetadata\xea\x02\x11\x46lyteidl::Pluginsb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/plugins/spark.proto\x12\x10\x66lyteidl.plugins\x1a\x19\x66lyteidl/core/tasks.proto\x1a\x1cgoogle/protobuf/struct.proto\"B\n\x10SparkApplication\".\n\x04Type\x12\n\n\x06PYTHON\x10\x00\x12\x08\n\x04JAVA\x10\x01\x12\t\n\x05SCALA\x10\x02\x12\x05\n\x01R\x10\x03\"\x86\x07\n\x08SparkJob\x12Q\n\x0f\x61pplicationType\x18\x01 \x01(\x0e\x32\'.flyteidl.plugins.SparkApplication.TypeR\x0f\x61pplicationType\x12\x30\n\x13mainApplicationFile\x18\x02 \x01(\tR\x13mainApplicationFile\x12\x1c\n\tmainClass\x18\x03 \x01(\tR\tmainClass\x12G\n\tsparkConf\x18\x04 \x03(\x0b\x32).flyteidl.plugins.SparkJob.SparkConfEntryR\tsparkConf\x12J\n\nhadoopConf\x18\x05 \x03(\x0b\x32*.flyteidl.plugins.SparkJob.HadoopConfEntryR\nhadoopConf\x12\"\n\x0c\x65xecutorPath\x18\x06 \x01(\tR\x0c\x65xecutorPath\x12?\n\x0e\x64\x61tabricksConf\x18\x07 \x01(\x0b\x32\x17.google.protobuf.StructR\x0e\x64\x61tabricksConf\x12(\n\x0f\x64\x61tabricksToken\x18\x08 \x01(\tR\x0f\x64\x61tabricksToken\x12.\n\x12\x64\x61tabricksInstance\x18\t \x01(\tR\x12\x64\x61tabricksInstance\x12\x35\n\tdriverPod\x18\n \x01(\x0b\x32\x15.flyteidl.core.K8sPodH\x00R\tdriverPod\x12\x34\n\x15\x64riverPodTemplateName\x18\x0b \x01(\tR\x15\x64riverPodTemplateName\x12\x39\n\x0b\x65xecutorPod\x18\x0c \x01(\x0b\x32\x15.flyteidl.core.K8sPodH\x01R\x0b\x65xecutorPod\x12\x38\n\x17\x65xecutorPodTemplateName\x18\r \x01(\tR\x17\x65xecutorPodTemplateName\x1a<\n\x0eSparkConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a=\n\x0fHadoopConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x10\n\x0e\x64riverPodValueB\x12\n\x10\x65xecutorPodValueB\xc2\x01\n\x14\x63om.flyteidl.pluginsB\nSparkProtoP\x01Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins\xa2\x02\x03\x46PX\xaa\x02\x10\x46lyteidl.Plugins\xca\x02\x10\x46lyteidl\\Plugins\xe2\x02\x1c\x46lyteidl\\Plugins\\GPBMetadata\xea\x02\x11\x46lyteidl::Pluginsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -27,14 +28,14 @@ _SPARKJOB_SPARKCONFENTRY._serialized_options = b'8\001' _SPARKJOB_HADOOPCONFENTRY._options = None _SPARKJOB_HADOOPCONFENTRY._serialized_options = b'8\001' - _globals['_SPARKAPPLICATION']._serialized_start=80 - _globals['_SPARKAPPLICATION']._serialized_end=146 - _globals['_SPARKAPPLICATION_TYPE']._serialized_start=100 - _globals['_SPARKAPPLICATION_TYPE']._serialized_end=146 - _globals['_SPARKJOB']._serialized_start=149 - _globals['_SPARKJOB']._serialized_end=787 - _globals['_SPARKJOB_SPARKCONFENTRY']._serialized_start=664 - _globals['_SPARKJOB_SPARKCONFENTRY']._serialized_end=724 - _globals['_SPARKJOB_HADOOPCONFENTRY']._serialized_start=726 - _globals['_SPARKJOB_HADOOPCONFENTRY']._serialized_end=787 + _globals['_SPARKAPPLICATION']._serialized_start=107 + _globals['_SPARKAPPLICATION']._serialized_end=173 + _globals['_SPARKAPPLICATION_TYPE']._serialized_start=127 + _globals['_SPARKAPPLICATION_TYPE']._serialized_end=173 + _globals['_SPARKJOB']._serialized_start=176 + _globals['_SPARKJOB']._serialized_end=1078 + 
_globals['_SPARKJOB_SPARKCONFENTRY']._serialized_start=917 + _globals['_SPARKJOB_SPARKCONFENTRY']._serialized_end=977 + _globals['_SPARKJOB_HADOOPCONFENTRY']._serialized_start=979 + _globals['_SPARKJOB_HADOOPCONFENTRY']._serialized_end=1040 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.pyi index e6b9e4eb68..95029a2613 100644 --- a/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.pyi @@ -1,3 +1,4 @@ +from flyteidl.core import tasks_pb2 as _tasks_pb2 from google.protobuf import struct_pb2 as _struct_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper @@ -22,7 +23,7 @@ class SparkApplication(_message.Message): def __init__(self) -> None: ... class SparkJob(_message.Message): - __slots__ = ["applicationType", "mainApplicationFile", "mainClass", "sparkConf", "hadoopConf", "executorPath", "databricksConf", "databricksToken", "databricksInstance"] + __slots__ = ["applicationType", "mainApplicationFile", "mainClass", "sparkConf", "hadoopConf", "executorPath", "databricksConf", "databricksToken", "databricksInstance", "driverPod", "driverPodTemplateName", "executorPod", "executorPodTemplateName"] class SparkConfEntry(_message.Message): __slots__ = ["key", "value"] KEY_FIELD_NUMBER: _ClassVar[int] @@ -46,6 +47,10 @@ class SparkJob(_message.Message): DATABRICKSCONF_FIELD_NUMBER: _ClassVar[int] DATABRICKSTOKEN_FIELD_NUMBER: _ClassVar[int] DATABRICKSINSTANCE_FIELD_NUMBER: _ClassVar[int] + DRIVERPOD_FIELD_NUMBER: _ClassVar[int] + DRIVERPODTEMPLATENAME_FIELD_NUMBER: _ClassVar[int] + EXECUTORPOD_FIELD_NUMBER: _ClassVar[int] + EXECUTORPODTEMPLATENAME_FIELD_NUMBER: _ClassVar[int] applicationType: SparkApplication.Type mainApplicationFile: str mainClass: str @@ -55,4 +60,8 @@ class SparkJob(_message.Message): databricksConf: _struct_pb2.Struct databricksToken: str databricksInstance: str - def __init__(self, applicationType: _Optional[_Union[SparkApplication.Type, str]] = ..., mainApplicationFile: _Optional[str] = ..., mainClass: _Optional[str] = ..., sparkConf: _Optional[_Mapping[str, str]] = ..., hadoopConf: _Optional[_Mapping[str, str]] = ..., executorPath: _Optional[str] = ..., databricksConf: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ..., databricksToken: _Optional[str] = ..., databricksInstance: _Optional[str] = ...) -> None: ... + driverPod: _tasks_pb2.K8sPod + driverPodTemplateName: str + executorPod: _tasks_pb2.K8sPod + executorPodTemplateName: str + def __init__(self, applicationType: _Optional[_Union[SparkApplication.Type, str]] = ..., mainApplicationFile: _Optional[str] = ..., mainClass: _Optional[str] = ..., sparkConf: _Optional[_Mapping[str, str]] = ..., hadoopConf: _Optional[_Mapping[str, str]] = ..., executorPath: _Optional[str] = ..., databricksConf: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ..., databricksToken: _Optional[str] = ..., databricksInstance: _Optional[str] = ..., driverPod: _Optional[_Union[_tasks_pb2.K8sPod, _Mapping]] = ..., driverPodTemplateName: _Optional[str] = ..., executorPod: _Optional[_Union[_tasks_pb2.K8sPod, _Mapping]] = ..., executorPodTemplateName: _Optional[str] = ...) -> None: ... 
diff --git a/flyteidl/gen/pb_rust/flyteidl.plugins.rs b/flyteidl/gen/pb_rust/flyteidl.plugins.rs index 5c7873b5d2..edf7e1eab1 100644 --- a/flyteidl/gen/pb_rust/flyteidl.plugins.rs +++ b/flyteidl/gen/pb_rust/flyteidl.plugins.rs @@ -285,6 +285,45 @@ pub struct SparkJob { /// This instance name can be set in either flytepropeller or flytekit. #[prost(string, tag="9")] pub databricks_instance: ::prost::alloc::string::String, + /// Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the + /// driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied + /// identically as, the default PodTemplate configured in FlytePropeller. + /// +optional + #[prost(string, tag="11")] + pub driver_pod_template_name: ::prost::alloc::string::String, + /// Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the + /// executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied + /// identically as, the default PodTemplate configured in FlytePropeller. + /// +optional + #[prost(string, tag="13")] + pub executor_pod_template_name: ::prost::alloc::string::String, + /// The pod spec and metadata to be used as the base configuration when creating the driver Pod for this task. + /// +optional + #[prost(oneof="spark_job::DriverPodValue", tags="10")] + pub driver_pod_value: ::core::option::Option, + /// The pod spec and metadata to be used as the base configuration when creating the executor Pods for this task. + /// +optional + #[prost(oneof="spark_job::ExecutorPodValue", tags="12")] + pub executor_pod_value: ::core::option::Option, +} +/// Nested message and enum types in `SparkJob`. +pub mod spark_job { + /// The pod spec and metadata to be used as the base configuration when creating the driver Pod for this task. + /// +optional + #[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum DriverPodValue { + #[prost(message, tag="10")] + DriverPod(super::super::core::K8sPod), + } + /// The pod spec and metadata to be used as the base configuration when creating the executor Pods for this task. + /// +optional + #[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum ExecutorPodValue { + #[prost(message, tag="12")] + ExecutorPod(super::super::core::K8sPod), + } } /// Custom proto for plugin that enables distributed training using #[allow(clippy::derive_partial_eq_without_eq)] diff --git a/flyteidl/protos/flyteidl/plugins/spark.proto b/flyteidl/protos/flyteidl/plugins/spark.proto index 666ea311b2..a9c3a5f901 100644 --- a/flyteidl/protos/flyteidl/plugins/spark.proto +++ b/flyteidl/protos/flyteidl/plugins/spark.proto @@ -1,8 +1,10 @@ syntax = "proto3"; -package flyteidl.plugins; +import "flyteidl/core/tasks.proto"; import "google/protobuf/struct.proto"; +package flyteidl.plugins; + option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"; message SparkApplication { @@ -31,4 +33,24 @@ message SparkJob { // Domain name of your deployment. Use the form .cloud.databricks.com. // This instance name can be set in either flytepropeller or flytekit. string databricksInstance = 9; + // The pod spec and metadata to be used as the base configuration when creating the driver Pod for this task. 
+ // +optional + oneof driverPodValue { + core.K8sPod driverPod = 10; + } + // Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the + // driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied + // identically as, the default PodTemplate configured in FlytePropeller. + // +optional + string driverPodTemplateName = 11; + // The pod spec and metadata to be used as the base configuration when creating the executor Pods for this task. + // +optional + oneof executorPodValue { + core.K8sPod executorPod = 12; + } + // Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the + // executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied + // identically as, the default PodTemplate configured in FlytePropeller. + // +optional + string executorPodTemplateName = 13; } From 6fefc0bcca55fcb10bb4fb25e6bd950145e70979 Mon Sep 17 00:00:00 2001 From: Andrew Dye Date: Sat, 7 Oct 2023 22:27:45 -0700 Subject: [PATCH 2/2] Use RoleSpec for driver and executor Signed-off-by: Andrew Dye --- .../pb-cpp/flyteidl/plugins/common.grpc.pb.cc | 24 + .../pb-cpp/flyteidl/plugins/common.grpc.pb.h | 47 + .../gen/pb-cpp/flyteidl/plugins/common.pb.cc | 530 ++++++++ .../gen/pb-cpp/flyteidl/plugins/common.pb.h | 333 +++++ .../gen/pb-cpp/flyteidl/plugins/spark.pb.cc | 463 ++----- .../gen/pb-cpp/flyteidl/plugins/spark.pb.h | 338 ++--- .../gen/pb-go/flyteidl/plugins/common.pb.go | 128 ++ .../flyteidl/plugins/common.pb.validate.go | 119 ++ .../gen/pb-go/flyteidl/plugins/spark.pb.go | 165 +-- .../flyteidl/plugins/spark.pb.validate.go | 40 +- .../gen/pb-java/flyteidl/plugins/Common.java | 968 +++++++++++++++ .../gen/pb-java/flyteidl/plugins/Spark.java | 1098 +++++------------ .../pb_python/flyteidl/plugins/common_pb2.py | 28 + .../pb_python/flyteidl/plugins/common_pb2.pyi | 14 + .../flyteidl/plugins/common_pb2_grpc.py | 4 + .../pb_python/flyteidl/plugins/spark_pb2.py | 24 +- .../pb_python/flyteidl/plugins/spark_pb2.pyi | 18 +- flyteidl/gen/pb_rust/flyteidl.plugins.rs | 72 +- flyteidl/protos/flyteidl/plugins/common.proto | 22 + flyteidl/protos/flyteidl/plugins/spark.proto | 26 +- 20 files changed, 2842 insertions(+), 1619 deletions(-) create mode 100644 flyteidl/gen/pb-cpp/flyteidl/plugins/common.grpc.pb.cc create mode 100644 flyteidl/gen/pb-cpp/flyteidl/plugins/common.grpc.pb.h create mode 100644 flyteidl/gen/pb-cpp/flyteidl/plugins/common.pb.cc create mode 100644 flyteidl/gen/pb-cpp/flyteidl/plugins/common.pb.h create mode 100644 flyteidl/gen/pb-go/flyteidl/plugins/common.pb.go create mode 100644 flyteidl/gen/pb-go/flyteidl/plugins/common.pb.validate.go create mode 100644 flyteidl/gen/pb-java/flyteidl/plugins/Common.java create mode 100644 flyteidl/gen/pb_python/flyteidl/plugins/common_pb2.py create mode 100644 flyteidl/gen/pb_python/flyteidl/plugins/common_pb2.pyi create mode 100644 flyteidl/gen/pb_python/flyteidl/plugins/common_pb2_grpc.py create mode 100644 flyteidl/protos/flyteidl/plugins/common.proto diff --git a/flyteidl/gen/pb-cpp/flyteidl/plugins/common.grpc.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/plugins/common.grpc.pb.cc new file mode 100644 index 0000000000..2a7071c8ea --- /dev/null +++ b/flyteidl/gen/pb-cpp/flyteidl/plugins/common.grpc.pb.cc @@ -0,0 +1,24 @@ +// Generated by the gRPC C++ plugin. +// If you make any local change, they will be lost. 
+// source: flyteidl/plugins/common.proto + +#include "flyteidl/plugins/common.pb.h" +#include "flyteidl/plugins/common.grpc.pb.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +namespace flyteidl { +namespace plugins { + +} // namespace flyteidl +} // namespace plugins + diff --git a/flyteidl/gen/pb-cpp/flyteidl/plugins/common.grpc.pb.h b/flyteidl/gen/pb-cpp/flyteidl/plugins/common.grpc.pb.h new file mode 100644 index 0000000000..85571ee76a --- /dev/null +++ b/flyteidl/gen/pb-cpp/flyteidl/plugins/common.grpc.pb.h @@ -0,0 +1,47 @@ +// Generated by the gRPC C++ plugin. +// If you make any local change, they will be lost. +// source: flyteidl/plugins/common.proto +#ifndef GRPC_flyteidl_2fplugins_2fcommon_2eproto__INCLUDED +#define GRPC_flyteidl_2fplugins_2fcommon_2eproto__INCLUDED + +#include "flyteidl/plugins/common.pb.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace grpc_impl { +class Channel; +class CompletionQueue; +class ServerCompletionQueue; +} // namespace grpc_impl + +namespace grpc { +namespace experimental { +template +class MessageAllocator; +} // namespace experimental +} // namespace grpc_impl + +namespace grpc { +class ServerContext; +} // namespace grpc + +namespace flyteidl { +namespace plugins { + +} // namespace plugins +} // namespace flyteidl + + +#endif // GRPC_flyteidl_2fplugins_2fcommon_2eproto__INCLUDED diff --git a/flyteidl/gen/pb-cpp/flyteidl/plugins/common.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/plugins/common.pb.cc new file mode 100644 index 0000000000..63bd7f8c95 --- /dev/null +++ b/flyteidl/gen/pb-cpp/flyteidl/plugins/common.pb.cc @@ -0,0 +1,530 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: flyteidl/plugins/common.proto + +#include "flyteidl/plugins/common.pb.h" + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +// @@protoc_insertion_point(includes) +#include + +extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2ftasks_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_K8sPod_flyteidl_2fcore_2ftasks_2eproto; +namespace flyteidl { +namespace plugins { +class RoleSpecDefaultTypeInternal { + public: + ::google::protobuf::internal::ExplicitlyConstructed _instance; + const ::flyteidl::core::K8sPod* pod_; +} _RoleSpec_default_instance_; +} // namespace plugins +} // namespace flyteidl +static void InitDefaultsRoleSpec_flyteidl_2fplugins_2fcommon_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::flyteidl::plugins::_RoleSpec_default_instance_; + new (ptr) ::flyteidl::plugins::RoleSpec(); + ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); + } + ::flyteidl::plugins::RoleSpec::InitAsDefaultInstance(); +} + +::google::protobuf::internal::SCCInfo<1> scc_info_RoleSpec_flyteidl_2fplugins_2fcommon_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsRoleSpec_flyteidl_2fplugins_2fcommon_2eproto}, { + &scc_info_K8sPod_flyteidl_2fcore_2ftasks_2eproto.base,}}; + +void InitDefaults_flyteidl_2fplugins_2fcommon_2eproto() { + ::google::protobuf::internal::InitSCC(&scc_info_RoleSpec_flyteidl_2fplugins_2fcommon_2eproto.base); +} + +::google::protobuf::Metadata file_level_metadata_flyteidl_2fplugins_2fcommon_2eproto[1]; +constexpr ::google::protobuf::EnumDescriptor const** file_level_enum_descriptors_flyteidl_2fplugins_2fcommon_2eproto = nullptr; +constexpr ::google::protobuf::ServiceDescriptor const** file_level_service_descriptors_flyteidl_2fplugins_2fcommon_2eproto = nullptr; + +const ::google::protobuf::uint32 TableStruct_flyteidl_2fplugins_2fcommon_2eproto::offsets[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { + ~0u, // no _has_bits_ + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::RoleSpec, _internal_metadata_), + ~0u, // no _extensions_ + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::RoleSpec, _oneof_case_[0]), + ~0u, // no _weak_field_map_ + offsetof(::flyteidl::plugins::RoleSpecDefaultTypeInternal, pod_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::RoleSpec, pod_template_name_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::RoleSpec, pod_value_), +}; +static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { + { 0, -1, sizeof(::flyteidl::plugins::RoleSpec)}, +}; + +static ::google::protobuf::Message const * const file_default_instances[] = { + reinterpret_cast(&::flyteidl::plugins::_RoleSpec_default_instance_), +}; + +::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_flyteidl_2fplugins_2fcommon_2eproto = { + {}, AddDescriptors_flyteidl_2fplugins_2fcommon_2eproto, "flyteidl/plugins/common.proto", schemas, + file_default_instances, TableStruct_flyteidl_2fplugins_2fcommon_2eproto::offsets, + file_level_metadata_flyteidl_2fplugins_2fcommon_2eproto, 1, file_level_enum_descriptors_flyteidl_2fplugins_2fcommon_2eproto, file_level_service_descriptors_flyteidl_2fplugins_2fcommon_2eproto, +}; + +const char descriptor_table_protodef_flyteidl_2fplugins_2fcommon_2eproto[] = + "\n\035flyteidl/plugins/common.proto\022\020flyteid" + "l.plugins\032\031flyteidl/core/tasks.proto\"X\n\010" + "RoleSpec\022$\n\003pod\030\001 \001(\0132\025.flyteidl.core.K8" + 
"sPodH\000\022\031\n\021pod_template_name\030\002 \001(\tB\013\n\tpod" + "_valueB\?Z=github.com/flyteorg/flyte/flyt" + "eidl/gen/pb-go/flyteidl/pluginsb\006proto3" + ; +::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fplugins_2fcommon_2eproto = { + false, InitDefaults_flyteidl_2fplugins_2fcommon_2eproto, + descriptor_table_protodef_flyteidl_2fplugins_2fcommon_2eproto, + "flyteidl/plugins/common.proto", &assign_descriptors_table_flyteidl_2fplugins_2fcommon_2eproto, 239, +}; + +void AddDescriptors_flyteidl_2fplugins_2fcommon_2eproto() { + static constexpr ::google::protobuf::internal::InitFunc deps[1] = + { + ::AddDescriptors_flyteidl_2fcore_2ftasks_2eproto, + }; + ::google::protobuf::internal::AddDescriptors(&descriptor_table_flyteidl_2fplugins_2fcommon_2eproto, deps, 1); +} + +// Force running AddDescriptors() at dynamic initialization time. +static bool dynamic_init_dummy_flyteidl_2fplugins_2fcommon_2eproto = []() { AddDescriptors_flyteidl_2fplugins_2fcommon_2eproto(); return true; }(); +namespace flyteidl { +namespace plugins { + +// =================================================================== + +void RoleSpec::InitAsDefaultInstance() { + ::flyteidl::plugins::_RoleSpec_default_instance_.pod_ = const_cast< ::flyteidl::core::K8sPod*>( + ::flyteidl::core::K8sPod::internal_default_instance()); +} +class RoleSpec::HasBitSetters { + public: + static const ::flyteidl::core::K8sPod& pod(const RoleSpec* msg); +}; + +const ::flyteidl::core::K8sPod& +RoleSpec::HasBitSetters::pod(const RoleSpec* msg) { + return *msg->pod_value_.pod_; +} +void RoleSpec::set_allocated_pod(::flyteidl::core::K8sPod* pod) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + clear_pod_value(); + if (pod) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + pod = ::google::protobuf::internal::GetOwnedMessage( + message_arena, pod, submessage_arena); + } + set_has_pod(); + pod_value_.pod_ = pod; + } + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.RoleSpec.pod) +} +void RoleSpec::clear_pod() { + if (has_pod()) { + delete pod_value_.pod_; + clear_has_pod_value(); + } +} +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int RoleSpec::kPodFieldNumber; +const int RoleSpec::kPodTemplateNameFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +RoleSpec::RoleSpec() + : ::google::protobuf::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:flyteidl.plugins.RoleSpec) +} +RoleSpec::RoleSpec(const RoleSpec& from) + : ::google::protobuf::Message(), + _internal_metadata_(nullptr) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + pod_template_name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.pod_template_name().size() > 0) { + pod_template_name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.pod_template_name_); + } + clear_has_pod_value(); + switch (from.pod_value_case()) { + case kPod: { + mutable_pod()->::flyteidl::core::K8sPod::MergeFrom(from.pod()); + break; + } + case POD_VALUE_NOT_SET: { + break; + } + } + // @@protoc_insertion_point(copy_constructor:flyteidl.plugins.RoleSpec) +} + +void RoleSpec::SharedCtor() { + ::google::protobuf::internal::InitSCC( + &scc_info_RoleSpec_flyteidl_2fplugins_2fcommon_2eproto.base); + pod_template_name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_pod_value(); +} + 
+RoleSpec::~RoleSpec() { + // @@protoc_insertion_point(destructor:flyteidl.plugins.RoleSpec) + SharedDtor(); +} + +void RoleSpec::SharedDtor() { + pod_template_name_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (has_pod_value()) { + clear_pod_value(); + } +} + +void RoleSpec::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const RoleSpec& RoleSpec::default_instance() { + ::google::protobuf::internal::InitSCC(&::scc_info_RoleSpec_flyteidl_2fplugins_2fcommon_2eproto.base); + return *internal_default_instance(); +} + + +void RoleSpec::clear_pod_value() { +// @@protoc_insertion_point(one_of_clear_start:flyteidl.plugins.RoleSpec) + switch (pod_value_case()) { + case kPod: { + delete pod_value_.pod_; + break; + } + case POD_VALUE_NOT_SET: { + break; + } + } + _oneof_case_[0] = POD_VALUE_NOT_SET; +} + + +void RoleSpec::Clear() { +// @@protoc_insertion_point(message_clear_start:flyteidl.plugins.RoleSpec) + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + pod_template_name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_pod_value(); + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* RoleSpec::_InternalParse(const char* begin, const char* end, void* object, + ::google::protobuf::internal::ParseContext* ctx) { + auto msg = static_cast(object); + ::google::protobuf::int32 size; (void)size; + int depth; (void)depth; + ::google::protobuf::uint32 tag; + ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; + auto ptr = begin; + while (ptr < end) { + ptr = ::google::protobuf::io::Parse32(ptr, &tag); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + switch (tag >> 3) { + // .flyteidl.core.K8sPod pod = 1; + case 1: { + if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::flyteidl::core::K8sPod::_InternalParse; + object = msg->mutable_pod(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, ptr)); + break; + } + // string pod_template_name = 2; + case 2: { + if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.plugins.RoleSpec.pod_template_name"); + object = msg->mutable_pod_template_name(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->EndGroup(tag); + return ptr; + } + auto res = UnknownFieldParse(tag, {_InternalParse, msg}, + ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); + ptr = res.first; + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); + if (res.second) return ptr; + } + } // switch + } // while + return ptr; +string_till_end: + static_cast<::std::string*>(object)->clear(); + 
static_cast<::std::string*>(object)->reserve(size); + goto len_delim_till_end; +len_delim_till_end: + return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, + {parser_till_end, object}, size); +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool RoleSpec::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:flyteidl.plugins.RoleSpec) + for (;;) { + ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // .flyteidl.core.K8sPod pod = 1; + case 1: { + if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, mutable_pod())); + } else { + goto handle_unusual; + } + break; + } + + // string pod_template_name = 2; + case 2: { + if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_pod_template_name())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->pod_template_name().data(), static_cast(this->pod_template_name().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.plugins.RoleSpec.pod_template_name")); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:flyteidl.plugins.RoleSpec) + return true; +failure: + // @@protoc_insertion_point(parse_failure:flyteidl.plugins.RoleSpec) + return false; +#undef DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void RoleSpec::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:flyteidl.plugins.RoleSpec) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .flyteidl.core.K8sPod pod = 1; + if (has_pod()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 1, HasBitSetters::pod(this), output); + } + + // string pod_template_name = 2; + if (this->pod_template_name().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->pod_template_name().data(), static_cast(this->pod_template_name().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.plugins.RoleSpec.pod_template_name"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 2, this->pod_template_name(), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:flyteidl.plugins.RoleSpec) +} + +::google::protobuf::uint8* RoleSpec::InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:flyteidl.plugins.RoleSpec) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .flyteidl.core.K8sPod pod = 1; + if (has_pod()) { + target = 
::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 1, HasBitSetters::pod(this), target); + } + + // string pod_template_name = 2; + if (this->pod_template_name().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->pod_template_name().data(), static_cast(this->pod_template_name().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.plugins.RoleSpec.pod_template_name"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 2, this->pod_template_name(), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:flyteidl.plugins.RoleSpec) + return target; +} + +size_t RoleSpec::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:flyteidl.plugins.RoleSpec) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + // string pod_template_name = 2; + if (this->pod_template_name().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->pod_template_name()); + } + + switch (pod_value_case()) { + // .flyteidl.core.K8sPod pod = 1; + case kPod: { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *pod_value_.pod_); + break; + } + case POD_VALUE_NOT_SET: { + break; + } + } + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void RoleSpec::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.plugins.RoleSpec) + GOOGLE_DCHECK_NE(&from, this); + const RoleSpec* source = + ::google::protobuf::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.plugins.RoleSpec) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.plugins.RoleSpec) + MergeFrom(*source); + } +} + +void RoleSpec::MergeFrom(const RoleSpec& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.plugins.RoleSpec) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + if (from.pod_template_name().size() > 0) { + + pod_template_name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.pod_template_name_); + } + switch (from.pod_value_case()) { + case kPod: { + mutable_pod()->::flyteidl::core::K8sPod::MergeFrom(from.pod()); + break; + } + case POD_VALUE_NOT_SET: { + break; + } + } +} + +void RoleSpec::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.plugins.RoleSpec) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void RoleSpec::CopyFrom(const RoleSpec& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.plugins.RoleSpec) + if (&from == this) 
return; + Clear(); + MergeFrom(from); +} + +bool RoleSpec::IsInitialized() const { + return true; +} + +void RoleSpec::Swap(RoleSpec* other) { + if (other == this) return; + InternalSwap(other); +} +void RoleSpec::InternalSwap(RoleSpec* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + pod_template_name_.Swap(&other->pod_template_name_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + swap(pod_value_, other->pod_value_); + swap(_oneof_case_[0], other->_oneof_case_[0]); +} + +::google::protobuf::Metadata RoleSpec::GetMetadata() const { + ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fplugins_2fcommon_2eproto); + return ::file_level_metadata_flyteidl_2fplugins_2fcommon_2eproto[kIndexInFileMessages]; +} + + +// @@protoc_insertion_point(namespace_scope) +} // namespace plugins +} // namespace flyteidl +namespace google { +namespace protobuf { +template<> PROTOBUF_NOINLINE ::flyteidl::plugins::RoleSpec* Arena::CreateMaybeMessage< ::flyteidl::plugins::RoleSpec >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::plugins::RoleSpec >(arena); +} +} // namespace protobuf +} // namespace google + +// @@protoc_insertion_point(global_scope) +#include diff --git a/flyteidl/gen/pb-cpp/flyteidl/plugins/common.pb.h b/flyteidl/gen/pb-cpp/flyteidl/plugins/common.pb.h new file mode 100644 index 0000000000..a2a0130ff1 --- /dev/null +++ b/flyteidl/gen/pb-cpp/flyteidl/plugins/common.pb.h @@ -0,0 +1,333 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: flyteidl/plugins/common.proto + +#ifndef PROTOBUF_INCLUDED_flyteidl_2fplugins_2fcommon_2eproto +#define PROTOBUF_INCLUDED_flyteidl_2fplugins_2fcommon_2eproto + +#include +#include + +#include +#if PROTOBUF_VERSION < 3007000 +#error This file was generated by a newer version of protoc which is +#error incompatible with your Protocol Buffer headers. Please update +#error your headers. +#endif +#if 3007000 < PROTOBUF_MIN_PROTOC_VERSION +#error This file was generated by an older version of protoc which is +#error incompatible with your Protocol Buffer headers. Please +#error regenerate this file with a newer version of protoc. +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include // IWYU pragma: export +#include // IWYU pragma: export +#include +#include "flyteidl/core/tasks.pb.h" +// @@protoc_insertion_point(includes) +#include +#define PROTOBUF_INTERNAL_EXPORT_flyteidl_2fplugins_2fcommon_2eproto + +// Internal implementation detail -- do not use these members. 
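[Editor's note, not part of the patch] The RoleSpec parser and serializer generated above give the message the usual proto3 round-trip behavior. A quick sanity check (illustrative only; it assumes the generated headers are on the include path and is not part of the generated sources) could look like this:

    #include <string>
    #include "flyteidl/plugins/common.pb.h"

    // Serialize a RoleSpec and parse it back, checking the string field survives.
    bool RoleSpecRoundTrips() {
      flyteidl::plugins::RoleSpec in;
      in.set_pod_template_name("abc");

      std::string wire;
      in.SerializeToString(&wire);
      // pod_template_name is field 2 with wire type 2, so the payload begins with
      // key byte 0x12 (18) and length 0x03 -- the same tag value the generated
      // MergePartialFromCodedStream() case for field 2 checks for above.

      flyteidl::plugins::RoleSpec out;
      return out.ParseFromString(wire) && out.pod_template_name() == "abc";
    }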
+struct TableStruct_flyteidl_2fplugins_2fcommon_2eproto { + static const ::google::protobuf::internal::ParseTableField entries[] + PROTOBUF_SECTION_VARIABLE(protodesc_cold); + static const ::google::protobuf::internal::AuxillaryParseTableField aux[] + PROTOBUF_SECTION_VARIABLE(protodesc_cold); + static const ::google::protobuf::internal::ParseTable schema[1] + PROTOBUF_SECTION_VARIABLE(protodesc_cold); + static const ::google::protobuf::internal::FieldMetadata field_metadata[]; + static const ::google::protobuf::internal::SerializationTable serialization_table[]; + static const ::google::protobuf::uint32 offsets[]; +}; +void AddDescriptors_flyteidl_2fplugins_2fcommon_2eproto(); +namespace flyteidl { +namespace plugins { +class RoleSpec; +class RoleSpecDefaultTypeInternal; +extern RoleSpecDefaultTypeInternal _RoleSpec_default_instance_; +} // namespace plugins +} // namespace flyteidl +namespace google { +namespace protobuf { +template<> ::flyteidl::plugins::RoleSpec* Arena::CreateMaybeMessage<::flyteidl::plugins::RoleSpec>(Arena*); +} // namespace protobuf +} // namespace google +namespace flyteidl { +namespace plugins { + +// =================================================================== + +class RoleSpec final : + public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.plugins.RoleSpec) */ { + public: + RoleSpec(); + virtual ~RoleSpec(); + + RoleSpec(const RoleSpec& from); + + inline RoleSpec& operator=(const RoleSpec& from) { + CopyFrom(from); + return *this; + } + #if LANG_CXX11 + RoleSpec(RoleSpec&& from) noexcept + : RoleSpec() { + *this = ::std::move(from); + } + + inline RoleSpec& operator=(RoleSpec&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + #endif + static const ::google::protobuf::Descriptor* descriptor() { + return default_instance().GetDescriptor(); + } + static const RoleSpec& default_instance(); + + enum PodValueCase { + kPod = 1, + POD_VALUE_NOT_SET = 0, + }; + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const RoleSpec* internal_default_instance() { + return reinterpret_cast( + &_RoleSpec_default_instance_); + } + static constexpr int kIndexInFileMessages = + 0; + + void Swap(RoleSpec* other); + friend void swap(RoleSpec& a, RoleSpec& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline RoleSpec* New() const final { + return CreateMaybeMessage(nullptr); + } + + RoleSpec* New(::google::protobuf::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::google::protobuf::Message& from) final; + void MergeFrom(const ::google::protobuf::Message& from) final; + void CopyFrom(const RoleSpec& from); + void MergeFrom(const RoleSpec& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); + ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } + #else + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const final; + 
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(RoleSpec* other); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::google::protobuf::Metadata GetMetadata() const final; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // string pod_template_name = 2; + void clear_pod_template_name(); + static const int kPodTemplateNameFieldNumber = 2; + const ::std::string& pod_template_name() const; + void set_pod_template_name(const ::std::string& value); + #if LANG_CXX11 + void set_pod_template_name(::std::string&& value); + #endif + void set_pod_template_name(const char* value); + void set_pod_template_name(const char* value, size_t size); + ::std::string* mutable_pod_template_name(); + ::std::string* release_pod_template_name(); + void set_allocated_pod_template_name(::std::string* pod_template_name); + + // .flyteidl.core.K8sPod pod = 1; + bool has_pod() const; + void clear_pod(); + static const int kPodFieldNumber = 1; + const ::flyteidl::core::K8sPod& pod() const; + ::flyteidl::core::K8sPod* release_pod(); + ::flyteidl::core::K8sPod* mutable_pod(); + void set_allocated_pod(::flyteidl::core::K8sPod* pod); + + void clear_pod_value(); + PodValueCase pod_value_case() const; + // @@protoc_insertion_point(class_scope:flyteidl.plugins.RoleSpec) + private: + class HasBitSetters; + void set_has_pod(); + + inline bool has_pod_value() const; + inline void clear_has_pod_value(); + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::google::protobuf::internal::ArenaStringPtr pod_template_name_; + union PodValueUnion { + PodValueUnion() {} + ::flyteidl::core::K8sPod* pod_; + } pod_value_; + mutable ::google::protobuf::internal::CachedSize _cached_size_; + ::google::protobuf::uint32 _oneof_case_[1]; + + friend struct ::TableStruct_flyteidl_2fplugins_2fcommon_2eproto; +}; +// =================================================================== + + +// =================================================================== + +#ifdef __GNUC__ + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wstrict-aliasing" +#endif // __GNUC__ +// RoleSpec + +// .flyteidl.core.K8sPod pod = 1; +inline bool RoleSpec::has_pod() const { + return pod_value_case() == kPod; +} +inline void RoleSpec::set_has_pod() { + _oneof_case_[0] = kPod; +} +inline ::flyteidl::core::K8sPod* RoleSpec::release_pod() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.RoleSpec.pod) + if (has_pod()) { + clear_has_pod_value(); + ::flyteidl::core::K8sPod* temp = pod_value_.pod_; + pod_value_.pod_ = nullptr; + return temp; + } else { + return nullptr; + } +} +inline const ::flyteidl::core::K8sPod& RoleSpec::pod() const { + // @@protoc_insertion_point(field_get:flyteidl.plugins.RoleSpec.pod) + return has_pod() + ? 
*pod_value_.pod_ + : *reinterpret_cast< ::flyteidl::core::K8sPod*>(&::flyteidl::core::_K8sPod_default_instance_); +} +inline ::flyteidl::core::K8sPod* RoleSpec::mutable_pod() { + if (!has_pod()) { + clear_pod_value(); + set_has_pod(); + pod_value_.pod_ = CreateMaybeMessage< ::flyteidl::core::K8sPod >( + GetArenaNoVirtual()); + } + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.RoleSpec.pod) + return pod_value_.pod_; +} + +// string pod_template_name = 2; +inline void RoleSpec::clear_pod_template_name() { + pod_template_name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& RoleSpec::pod_template_name() const { + // @@protoc_insertion_point(field_get:flyteidl.plugins.RoleSpec.pod_template_name) + return pod_template_name_.GetNoArena(); +} +inline void RoleSpec::set_pod_template_name(const ::std::string& value) { + + pod_template_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.plugins.RoleSpec.pod_template_name) +} +#if LANG_CXX11 +inline void RoleSpec::set_pod_template_name(::std::string&& value) { + + pod_template_name_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.plugins.RoleSpec.pod_template_name) +} +#endif +inline void RoleSpec::set_pod_template_name(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + pod_template_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.plugins.RoleSpec.pod_template_name) +} +inline void RoleSpec::set_pod_template_name(const char* value, size_t size) { + + pod_template_name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.plugins.RoleSpec.pod_template_name) +} +inline ::std::string* RoleSpec::mutable_pod_template_name() { + + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.RoleSpec.pod_template_name) + return pod_template_name_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* RoleSpec::release_pod_template_name() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.RoleSpec.pod_template_name) + + return pod_template_name_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void RoleSpec::set_allocated_pod_template_name(::std::string* pod_template_name) { + if (pod_template_name != nullptr) { + + } else { + + } + pod_template_name_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), pod_template_name); + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.RoleSpec.pod_template_name) +} + +inline bool RoleSpec::has_pod_value() const { + return pod_value_case() != POD_VALUE_NOT_SET; +} +inline void RoleSpec::clear_has_pod_value() { + _oneof_case_[0] = POD_VALUE_NOT_SET; +} +inline RoleSpec::PodValueCase RoleSpec::pod_value_case() const { + return RoleSpec::PodValueCase(_oneof_case_[0]); +} +#ifdef __GNUC__ + #pragma GCC diagnostic pop +#endif // __GNUC__ + +// @@protoc_insertion_point(namespace_scope) + +} // namespace plugins +} // namespace flyteidl + +// @@protoc_insertion_point(global_scope) + +#include +#endif // PROTOBUF_INCLUDED_flyteidl_2fplugins_2fcommon_2eproto diff --git 
a/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.cc index dd81d750f9..9956e0d22a 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.cc @@ -16,7 +16,7 @@ // @@protoc_insertion_point(includes) #include -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2ftasks_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_K8sPod_flyteidl_2fcore_2ftasks_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fplugins_2fcommon_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_RoleSpec_flyteidl_2fplugins_2fcommon_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fplugins_2fspark_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_SparkJob_HadoopConfEntry_DoNotUse_flyteidl_2fplugins_2fspark_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fplugins_2fspark_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_SparkJob_SparkConfEntry_DoNotUse_flyteidl_2fplugins_2fspark_2eproto; extern PROTOBUF_INTERNAL_EXPORT_google_2fprotobuf_2fstruct_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_ListValue_google_2fprotobuf_2fstruct_2eproto; @@ -37,8 +37,6 @@ class SparkJob_HadoopConfEntry_DoNotUseDefaultTypeInternal { class SparkJobDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed _instance; - const ::flyteidl::core::K8sPod* driverpod_; - const ::flyteidl::core::K8sPod* executorpod_; } _SparkJob_default_instance_; } // namespace plugins } // namespace flyteidl @@ -98,7 +96,7 @@ ::google::protobuf::internal::SCCInfo<4> scc_info_SparkJob_flyteidl_2fplugins_2f &scc_info_SparkJob_SparkConfEntry_DoNotUse_flyteidl_2fplugins_2fspark_2eproto.base, &scc_info_SparkJob_HadoopConfEntry_DoNotUse_flyteidl_2fplugins_2fspark_2eproto.base, &scc_info_ListValue_google_2fprotobuf_2fstruct_2eproto.base, - &scc_info_K8sPod_flyteidl_2fcore_2ftasks_2eproto.base,}}; + &scc_info_RoleSpec_flyteidl_2fplugins_2fcommon_2eproto.base,}}; void InitDefaults_flyteidl_2fplugins_2fspark_2eproto() { ::google::protobuf::internal::InitSCC(&scc_info_SparkApplication_flyteidl_2fplugins_2fspark_2eproto.base); @@ -138,7 +136,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fplugins_2fspark_2eproto: ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, _internal_metadata_), ~0u, // no _extensions_ - PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, _oneof_case_[0]), + ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, applicationtype_), PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, mainapplicationfile_), @@ -149,12 +147,8 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fplugins_2fspark_2eproto: PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, databricksconf_), PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, databrickstoken_), PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, databricksinstance_), - offsetof(::flyteidl::plugins::SparkJobDefaultTypeInternal, driverpod_), - PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, driverpodtemplatename_), - offsetof(::flyteidl::plugins::SparkJobDefaultTypeInternal, executorpod_), - PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, executorpodtemplatename_), - PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, driverPodValue_), - PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, executorPodValue_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, driverspec_), + PROTOBUF_FIELD_OFFSET(::flyteidl::plugins::SparkJob, 
executorspec_), }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::plugins::SparkApplication)}, @@ -178,39 +172,37 @@ ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_fl const char descriptor_table_protodef_flyteidl_2fplugins_2fspark_2eproto[] = "\n\034flyteidl/plugins/spark.proto\022\020flyteidl" - ".plugins\032\031flyteidl/core/tasks.proto\032\034goo" - "gle/protobuf/struct.proto\"B\n\020SparkApplic" - "ation\".\n\004Type\022\n\n\006PYTHON\020\000\022\010\n\004JAVA\020\001\022\t\n\005S" - "CALA\020\002\022\005\n\001R\020\003\"\233\005\n\010SparkJob\022@\n\017applicatio" - "nType\030\001 \001(\0162\'.flyteidl.plugins.SparkAppl" - "ication.Type\022\033\n\023mainApplicationFile\030\002 \001(" - "\t\022\021\n\tmainClass\030\003 \001(\t\022<\n\tsparkConf\030\004 \003(\0132" - ").flyteidl.plugins.SparkJob.SparkConfEnt" - "ry\022>\n\nhadoopConf\030\005 \003(\0132*.flyteidl.plugin" - "s.SparkJob.HadoopConfEntry\022\024\n\014executorPa" - "th\030\006 \001(\t\022/\n\016databricksConf\030\007 \001(\0132\027.googl" - "e.protobuf.Struct\022\027\n\017databricksToken\030\010 \001" - "(\t\022\032\n\022databricksInstance\030\t \001(\t\022*\n\tdriver" - "Pod\030\n \001(\0132\025.flyteidl.core.K8sPodH\000\022\035\n\025dr" - "iverPodTemplateName\030\013 \001(\t\022,\n\013executorPod" - "\030\014 \001(\0132\025.flyteidl.core.K8sPodH\001\022\037\n\027execu" - "torPodTemplateName\030\r \001(\t\0320\n\016SparkConfEnt" - "ry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\0321\n\017Ha" - "doopConfEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(" - "\t:\0028\001B\020\n\016driverPodValueB\022\n\020executorPodVa" - "lueB\?Z=github.com/flyteorg/flyte/flyteid" - "l/gen/pb-go/flyteidl/pluginsb\006proto3" + ".plugins\032\035flyteidl/plugins/common.proto\032" + "\034google/protobuf/struct.proto\"B\n\020SparkAp" + "plication\".\n\004Type\022\n\n\006PYTHON\020\000\022\010\n\004JAVA\020\001\022" + "\t\n\005SCALA\020\002\022\005\n\001R\020\003\"\275\004\n\010SparkJob\022@\n\017applic" + "ationType\030\001 \001(\0162\'.flyteidl.plugins.Spark" + "Application.Type\022\033\n\023mainApplicationFile\030" + "\002 \001(\t\022\021\n\tmainClass\030\003 \001(\t\022<\n\tsparkConf\030\004 " + "\003(\0132).flyteidl.plugins.SparkJob.SparkCon" + "fEntry\022>\n\nhadoopConf\030\005 \003(\0132*.flyteidl.pl" + "ugins.SparkJob.HadoopConfEntry\022\024\n\014execut" + "orPath\030\006 \001(\t\022/\n\016databricksConf\030\007 \001(\0132\027.g" + "oogle.protobuf.Struct\022\027\n\017databricksToken" + "\030\010 \001(\t\022\032\n\022databricksInstance\030\t \001(\t\022.\n\ndr" + "iverSpec\030\n \001(\0132\032.flyteidl.plugins.RoleSp" + "ec\0220\n\014executorSpec\030\013 \001(\0132\032.flyteidl.plug" + "ins.RoleSpec\0320\n\016SparkConfEntry\022\013\n\003key\030\001 " + "\001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\0321\n\017HadoopConfEntr" + "y\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B\?Z=git" + "hub.com/flyteorg/flyte/flyteidl/gen/pb-g" + "o/flyteidl/pluginsb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fplugins_2fspark_2eproto = { false, InitDefaults_flyteidl_2fplugins_2fspark_2eproto, descriptor_table_protodef_flyteidl_2fplugins_2fspark_2eproto, - "flyteidl/plugins/spark.proto", &assign_descriptors_table_flyteidl_2fplugins_2fspark_2eproto, 916, + "flyteidl/plugins/spark.proto", 
&assign_descriptors_table_flyteidl_2fplugins_2fspark_2eproto, 826, }; void AddDescriptors_flyteidl_2fplugins_2fspark_2eproto() { static constexpr ::google::protobuf::internal::InitFunc deps[2] = { - ::AddDescriptors_flyteidl_2fcore_2ftasks_2eproto, + ::AddDescriptors_flyteidl_2fplugins_2fcommon_2eproto, ::AddDescriptors_google_2fprotobuf_2fstruct_2eproto, }; ::google::protobuf::internal::AddDescriptors(&descriptor_table_flyteidl_2fplugins_2fspark_2eproto, deps, 2); @@ -542,29 +534,29 @@ bool SparkJob_HadoopConfEntry_DoNotUse::_ParseMap(const char* begin, const char* void SparkJob::InitAsDefaultInstance() { ::flyteidl::plugins::_SparkJob_default_instance_._instance.get_mutable()->databricksconf_ = const_cast< ::google::protobuf::Struct*>( ::google::protobuf::Struct::internal_default_instance()); - ::flyteidl::plugins::_SparkJob_default_instance_.driverpod_ = const_cast< ::flyteidl::core::K8sPod*>( - ::flyteidl::core::K8sPod::internal_default_instance()); - ::flyteidl::plugins::_SparkJob_default_instance_.executorpod_ = const_cast< ::flyteidl::core::K8sPod*>( - ::flyteidl::core::K8sPod::internal_default_instance()); + ::flyteidl::plugins::_SparkJob_default_instance_._instance.get_mutable()->driverspec_ = const_cast< ::flyteidl::plugins::RoleSpec*>( + ::flyteidl::plugins::RoleSpec::internal_default_instance()); + ::flyteidl::plugins::_SparkJob_default_instance_._instance.get_mutable()->executorspec_ = const_cast< ::flyteidl::plugins::RoleSpec*>( + ::flyteidl::plugins::RoleSpec::internal_default_instance()); } class SparkJob::HasBitSetters { public: static const ::google::protobuf::Struct& databricksconf(const SparkJob* msg); - static const ::flyteidl::core::K8sPod& driverpod(const SparkJob* msg); - static const ::flyteidl::core::K8sPod& executorpod(const SparkJob* msg); + static const ::flyteidl::plugins::RoleSpec& driverspec(const SparkJob* msg); + static const ::flyteidl::plugins::RoleSpec& executorspec(const SparkJob* msg); }; const ::google::protobuf::Struct& SparkJob::HasBitSetters::databricksconf(const SparkJob* msg) { return *msg->databricksconf_; } -const ::flyteidl::core::K8sPod& -SparkJob::HasBitSetters::driverpod(const SparkJob* msg) { - return *msg->driverPodValue_.driverpod_; +const ::flyteidl::plugins::RoleSpec& +SparkJob::HasBitSetters::driverspec(const SparkJob* msg) { + return *msg->driverspec_; } -const ::flyteidl::core::K8sPod& -SparkJob::HasBitSetters::executorpod(const SparkJob* msg) { - return *msg->executorPodValue_.executorpod_; +const ::flyteidl::plugins::RoleSpec& +SparkJob::HasBitSetters::executorspec(const SparkJob* msg) { + return *msg->executorspec_; } void SparkJob::clear_databricksconf() { if (GetArenaNoVirtual() == nullptr && databricksconf_ != nullptr) { @@ -572,45 +564,17 @@ void SparkJob::clear_databricksconf() { } databricksconf_ = nullptr; } -void SparkJob::set_allocated_driverpod(::flyteidl::core::K8sPod* driverpod) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - clear_driverPodValue(); - if (driverpod) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - driverpod = ::google::protobuf::internal::GetOwnedMessage( - message_arena, driverpod, submessage_arena); - } - set_has_driverpod(); - driverPodValue_.driverpod_ = driverpod; - } - // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.driverPod) -} -void SparkJob::clear_driverpod() { - if (has_driverpod()) { - delete driverPodValue_.driverpod_; - clear_has_driverPodValue(); +void SparkJob::clear_driverspec() { + 
if (GetArenaNoVirtual() == nullptr && driverspec_ != nullptr) { + delete driverspec_; } + driverspec_ = nullptr; } -void SparkJob::set_allocated_executorpod(::flyteidl::core::K8sPod* executorpod) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - clear_executorPodValue(); - if (executorpod) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - executorpod = ::google::protobuf::internal::GetOwnedMessage( - message_arena, executorpod, submessage_arena); - } - set_has_executorpod(); - executorPodValue_.executorpod_ = executorpod; - } - // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.executorPod) -} -void SparkJob::clear_executorpod() { - if (has_executorpod()) { - delete executorPodValue_.executorpod_; - clear_has_executorPodValue(); +void SparkJob::clear_executorspec() { + if (GetArenaNoVirtual() == nullptr && executorspec_ != nullptr) { + delete executorspec_; } + executorspec_ = nullptr; } #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int SparkJob::kApplicationTypeFieldNumber; @@ -622,10 +586,8 @@ const int SparkJob::kExecutorPathFieldNumber; const int SparkJob::kDatabricksConfFieldNumber; const int SparkJob::kDatabricksTokenFieldNumber; const int SparkJob::kDatabricksInstanceFieldNumber; -const int SparkJob::kDriverPodFieldNumber; -const int SparkJob::kDriverPodTemplateNameFieldNumber; -const int SparkJob::kExecutorPodFieldNumber; -const int SparkJob::kExecutorPodTemplateNameFieldNumber; +const int SparkJob::kDriverSpecFieldNumber; +const int SparkJob::kExecutorSpecFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 SparkJob::SparkJob() @@ -659,40 +621,22 @@ SparkJob::SparkJob(const SparkJob& from) if (from.databricksinstance().size() > 0) { databricksinstance_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.databricksinstance_); } - driverpodtemplatename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.driverpodtemplatename().size() > 0) { - driverpodtemplatename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.driverpodtemplatename_); - } - executorpodtemplatename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.executorpodtemplatename().size() > 0) { - executorpodtemplatename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.executorpodtemplatename_); - } if (from.has_databricksconf()) { databricksconf_ = new ::google::protobuf::Struct(*from.databricksconf_); } else { databricksconf_ = nullptr; } - applicationtype_ = from.applicationtype_; - clear_has_driverPodValue(); - switch (from.driverPodValue_case()) { - case kDriverPod: { - mutable_driverpod()->::flyteidl::core::K8sPod::MergeFrom(from.driverpod()); - break; - } - case DRIVERPODVALUE_NOT_SET: { - break; - } + if (from.has_driverspec()) { + driverspec_ = new ::flyteidl::plugins::RoleSpec(*from.driverspec_); + } else { + driverspec_ = nullptr; } - clear_has_executorPodValue(); - switch (from.executorPodValue_case()) { - case kExecutorPod: { - mutable_executorpod()->::flyteidl::core::K8sPod::MergeFrom(from.executorpod()); - break; - } - case EXECUTORPODVALUE_NOT_SET: { - break; - } + if (from.has_executorspec()) { + executorspec_ = new ::flyteidl::plugins::RoleSpec(*from.executorspec_); + } else { + executorspec_ = nullptr; } + applicationtype_ = from.applicationtype_; // 
@@protoc_insertion_point(copy_constructor:flyteidl.plugins.SparkJob) } @@ -704,13 +648,9 @@ void SparkJob::SharedCtor() { executorpath_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); databrickstoken_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); databricksinstance_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - driverpodtemplatename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - executorpodtemplatename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); ::memset(&databricksconf_, 0, static_cast( reinterpret_cast(&applicationtype_) - reinterpret_cast(&databricksconf_)) + sizeof(applicationtype_)); - clear_has_driverPodValue(); - clear_has_executorPodValue(); } SparkJob::~SparkJob() { @@ -724,15 +664,9 @@ void SparkJob::SharedDtor() { executorpath_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); databrickstoken_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); databricksinstance_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - driverpodtemplatename_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - executorpodtemplatename_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete databricksconf_; - if (has_driverPodValue()) { - clear_driverPodValue(); - } - if (has_executorPodValue()) { - clear_executorPodValue(); - } + if (this != internal_default_instance()) delete driverspec_; + if (this != internal_default_instance()) delete executorspec_; } void SparkJob::SetCachedSize(int size) const { @@ -744,35 +678,6 @@ const SparkJob& SparkJob::default_instance() { } -void SparkJob::clear_driverPodValue() { -// @@protoc_insertion_point(one_of_clear_start:flyteidl.plugins.SparkJob) - switch (driverPodValue_case()) { - case kDriverPod: { - delete driverPodValue_.driverpod_; - break; - } - case DRIVERPODVALUE_NOT_SET: { - break; - } - } - _oneof_case_[0] = DRIVERPODVALUE_NOT_SET; -} - -void SparkJob::clear_executorPodValue() { -// @@protoc_insertion_point(one_of_clear_start:flyteidl.plugins.SparkJob) - switch (executorPodValue_case()) { - case kExecutorPod: { - delete executorPodValue_.executorpod_; - break; - } - case EXECUTORPODVALUE_NOT_SET: { - break; - } - } - _oneof_case_[1] = EXECUTORPODVALUE_NOT_SET; -} - - void SparkJob::Clear() { // @@protoc_insertion_point(message_clear_start:flyteidl.plugins.SparkJob) ::google::protobuf::uint32 cached_has_bits = 0; @@ -786,15 +691,19 @@ void SparkJob::Clear() { executorpath_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); databrickstoken_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); databricksinstance_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - driverpodtemplatename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - executorpodtemplatename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == nullptr && databricksconf_ != nullptr) { delete databricksconf_; } databricksconf_ = nullptr; + if (GetArenaNoVirtual() == nullptr && driverspec_ != nullptr) { + delete driverspec_; + } + driverspec_ = nullptr; + if (GetArenaNoVirtual() == nullptr && executorspec_ != nullptr) { + delete executorspec_; + } + 
executorspec_ = nullptr; applicationtype_ = 0; - clear_driverPodValue(); - clear_executorPodValue(); _internal_metadata_.Clear(); } @@ -950,64 +859,32 @@ const char* SparkJob::_InternalParse(const char* begin, const char* end, void* o ptr += size; break; } - // .flyteidl.core.K8sPod driverPod = 10; + // .flyteidl.plugins.RoleSpec driverSpec = 10; case 10: { if (static_cast<::google::protobuf::uint8>(tag) != 82) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::K8sPod::_InternalParse; - object = msg->mutable_driverpod(); + parser_till_end = ::flyteidl::plugins::RoleSpec::_InternalParse; + object = msg->mutable_driverspec(); if (size > end - ptr) goto len_delim_till_end; ptr += size; GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( {parser_till_end, object}, ptr - size, ptr)); break; } - // string driverPodTemplateName = 11; + // .flyteidl.plugins.RoleSpec executorSpec = 11; case 11: { if (static_cast<::google::protobuf::uint8>(tag) != 90) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.plugins.SparkJob.driverPodTemplateName"); - object = msg->mutable_driverpodtemplatename(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; - } - // .flyteidl.core.K8sPod executorPod = 12; - case 12: { - if (static_cast<::google::protobuf::uint8>(tag) != 98) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::K8sPod::_InternalParse; - object = msg->mutable_executorpod(); + parser_till_end = ::flyteidl::plugins::RoleSpec::_InternalParse; + object = msg->mutable_executorspec(); if (size > end - ptr) goto len_delim_till_end; ptr += size; GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( {parser_till_end, object}, ptr - size, ptr)); break; } - // string executorPodTemplateName = 13; - case 13: { - if (static_cast<::google::protobuf::uint8>(tag) != 106) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.plugins.SparkJob.executorPodTemplateName"); - object = msg->mutable_executorpodtemplatename(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; - } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -1194,52 +1071,22 @@ bool SparkJob::MergePartialFromCodedStream( break; } - // .flyteidl.core.K8sPod driverPod = 10; + // .flyteidl.plugins.RoleSpec driverSpec = 10; case 10: { if (static_cast< ::google::protobuf::uint8>(tag) == (82 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_driverpod())); + input, mutable_driverspec())); } else { goto handle_unusual; } break; } - // string 
driverPodTemplateName = 11; + // .flyteidl.plugins.RoleSpec executorSpec = 11; case 11: { if (static_cast< ::google::protobuf::uint8>(tag) == (90 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_driverpodtemplatename())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->driverpodtemplatename().data(), static_cast(this->driverpodtemplatename().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.plugins.SparkJob.driverPodTemplateName")); - } else { - goto handle_unusual; - } - break; - } - - // .flyteidl.core.K8sPod executorPod = 12; - case 12: { - if (static_cast< ::google::protobuf::uint8>(tag) == (98 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_executorpod())); - } else { - goto handle_unusual; - } - break; - } - - // string executorPodTemplateName = 13; - case 13: { - if (static_cast< ::google::protobuf::uint8>(tag) == (106 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_executorpodtemplatename())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->executorpodtemplatename().data(), static_cast(this->executorpodtemplatename().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.plugins.SparkJob.executorPodTemplateName")); + input, mutable_executorspec())); } else { goto handle_unusual; } @@ -1433,36 +1280,16 @@ void SparkJob::SerializeWithCachedSizes( 9, this->databricksinstance(), output); } - // .flyteidl.core.K8sPod driverPod = 10; - if (has_driverpod()) { + // .flyteidl.plugins.RoleSpec driverSpec = 10; + if (this->has_driverspec()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 10, HasBitSetters::driverpod(this), output); - } - - // string driverPodTemplateName = 11; - if (this->driverpodtemplatename().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->driverpodtemplatename().data(), static_cast(this->driverpodtemplatename().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.plugins.SparkJob.driverPodTemplateName"); - ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 11, this->driverpodtemplatename(), output); + 10, HasBitSetters::driverspec(this), output); } - // .flyteidl.core.K8sPod executorPod = 12; - if (has_executorpod()) { + // .flyteidl.plugins.RoleSpec executorSpec = 11; + if (this->has_executorspec()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 12, HasBitSetters::executorpod(this), output); - } - - // string executorPodTemplateName = 13; - if (this->executorpodtemplatename().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->executorpodtemplatename().data(), static_cast(this->executorpodtemplatename().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.plugins.SparkJob.executorPodTemplateName"); - ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 13, this->executorpodtemplatename(), output); + 11, HasBitSetters::executorspec(this), output); } if (_internal_metadata_.have_unknown_fields()) { @@ -1644,40 +1471,18 @@ ::google::protobuf::uint8* SparkJob::InternalSerializeWithCachedSizesToArray( 9, this->databricksinstance(), target); } - // .flyteidl.core.K8sPod driverPod = 10; - if (has_driverpod()) { + // .flyteidl.plugins.RoleSpec driverSpec = 10; + if (this->has_driverspec()) { target = 
::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( - 10, HasBitSetters::driverpod(this), target); + 10, HasBitSetters::driverspec(this), target); } - // string driverPodTemplateName = 11; - if (this->driverpodtemplatename().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->driverpodtemplatename().data(), static_cast(this->driverpodtemplatename().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.plugins.SparkJob.driverPodTemplateName"); - target = - ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 11, this->driverpodtemplatename(), target); - } - - // .flyteidl.core.K8sPod executorPod = 12; - if (has_executorpod()) { + // .flyteidl.plugins.RoleSpec executorSpec = 11; + if (this->has_executorspec()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( - 12, HasBitSetters::executorpod(this), target); - } - - // string executorPodTemplateName = 13; - if (this->executorpodtemplatename().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->executorpodtemplatename().data(), static_cast(this->executorpodtemplatename().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.plugins.SparkJob.executorPodTemplateName"); - target = - ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 13, this->executorpodtemplatename(), target); + 11, HasBitSetters::executorspec(this), target); } if (_internal_metadata_.have_unknown_fields()) { @@ -1764,25 +1569,25 @@ size_t SparkJob::ByteSizeLong() const { this->databricksinstance()); } - // string driverPodTemplateName = 11; - if (this->driverpodtemplatename().size() > 0) { + // .google.protobuf.Struct databricksConf = 7; + if (this->has_databricksconf()) { total_size += 1 + - ::google::protobuf::internal::WireFormatLite::StringSize( - this->driverpodtemplatename()); + ::google::protobuf::internal::WireFormatLite::MessageSize( + *databricksconf_); } - // string executorPodTemplateName = 13; - if (this->executorpodtemplatename().size() > 0) { + // .flyteidl.plugins.RoleSpec driverSpec = 10; + if (this->has_driverspec()) { total_size += 1 + - ::google::protobuf::internal::WireFormatLite::StringSize( - this->executorpodtemplatename()); + ::google::protobuf::internal::WireFormatLite::MessageSize( + *driverspec_); } - // .google.protobuf.Struct databricksConf = 7; - if (this->has_databricksconf()) { + // .flyteidl.plugins.RoleSpec executorSpec = 11; + if (this->has_executorspec()) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( - *databricksconf_); + *executorspec_); } // .flyteidl.plugins.SparkApplication.Type applicationType = 1; @@ -1791,30 +1596,6 @@ size_t SparkJob::ByteSizeLong() const { ::google::protobuf::internal::WireFormatLite::EnumSize(this->applicationtype()); } - switch (driverPodValue_case()) { - // .flyteidl.core.K8sPod driverPod = 10; - case kDriverPod: { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *driverPodValue_.driverpod_); - break; - } - case DRIVERPODVALUE_NOT_SET: { - break; - } - } - switch (executorPodValue_case()) { - // .flyteidl.core.K8sPod executorPod = 12; - case kExecutorPod: { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *executorPodValue_.executorpod_); - break; - } - case EXECUTORPODVALUE_NOT_SET: { - break; - } - } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); 
SetCachedSize(cached_size); return total_size; @@ -1864,37 +1645,17 @@ void SparkJob::MergeFrom(const SparkJob& from) { databricksinstance_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.databricksinstance_); } - if (from.driverpodtemplatename().size() > 0) { - - driverpodtemplatename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.driverpodtemplatename_); - } - if (from.executorpodtemplatename().size() > 0) { - - executorpodtemplatename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.executorpodtemplatename_); - } if (from.has_databricksconf()) { mutable_databricksconf()->::google::protobuf::Struct::MergeFrom(from.databricksconf()); } - if (from.applicationtype() != 0) { - set_applicationtype(from.applicationtype()); + if (from.has_driverspec()) { + mutable_driverspec()->::flyteidl::plugins::RoleSpec::MergeFrom(from.driverspec()); } - switch (from.driverPodValue_case()) { - case kDriverPod: { - mutable_driverpod()->::flyteidl::core::K8sPod::MergeFrom(from.driverpod()); - break; - } - case DRIVERPODVALUE_NOT_SET: { - break; - } + if (from.has_executorspec()) { + mutable_executorspec()->::flyteidl::plugins::RoleSpec::MergeFrom(from.executorspec()); } - switch (from.executorPodValue_case()) { - case kExecutorPod: { - mutable_executorpod()->::flyteidl::core::K8sPod::MergeFrom(from.executorpod()); - break; - } - case EXECUTORPODVALUE_NOT_SET: { - break; - } + if (from.applicationtype() != 0) { + set_applicationtype(from.applicationtype()); } } @@ -1935,16 +1696,10 @@ void SparkJob::InternalSwap(SparkJob* other) { GetArenaNoVirtual()); databricksinstance_.Swap(&other->databricksinstance_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); - driverpodtemplatename_.Swap(&other->driverpodtemplatename_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - executorpodtemplatename_.Swap(&other->executorpodtemplatename_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); swap(databricksconf_, other->databricksconf_); + swap(driverspec_, other->driverspec_); + swap(executorspec_, other->executorspec_); swap(applicationtype_, other->applicationtype_); - swap(driverPodValue_, other->driverPodValue_); - swap(executorPodValue_, other->executorPodValue_); - swap(_oneof_case_[0], other->_oneof_case_[0]); - swap(_oneof_case_[1], other->_oneof_case_[1]); } ::google::protobuf::Metadata SparkJob::GetMetadata() const { diff --git a/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.h b/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.h index c6ccc2b036..36126355a4 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/plugins/spark.pb.h @@ -35,7 +35,7 @@ #include #include #include -#include "flyteidl/core/tasks.pb.h" +#include "flyteidl/plugins/common.pb.h" #include // @@protoc_insertion_point(includes) #include @@ -321,16 +321,6 @@ class SparkJob final : } static const SparkJob& default_instance(); - enum DriverPodValueCase { - kDriverPod = 10, - DRIVERPODVALUE_NOT_SET = 0, - }; - - enum ExecutorPodValueCase { - kExecutorPod = 12, - EXECUTORPODVALUE_NOT_SET = 0, - }; - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY static inline const SparkJob* internal_default_instance() { return reinterpret_cast( @@ -483,34 +473,6 @@ class SparkJob final : ::std::string* release_databricksinstance(); void set_allocated_databricksinstance(::std::string* 
databricksinstance); - // string driverPodTemplateName = 11; - void clear_driverpodtemplatename(); - static const int kDriverPodTemplateNameFieldNumber = 11; - const ::std::string& driverpodtemplatename() const; - void set_driverpodtemplatename(const ::std::string& value); - #if LANG_CXX11 - void set_driverpodtemplatename(::std::string&& value); - #endif - void set_driverpodtemplatename(const char* value); - void set_driverpodtemplatename(const char* value, size_t size); - ::std::string* mutable_driverpodtemplatename(); - ::std::string* release_driverpodtemplatename(); - void set_allocated_driverpodtemplatename(::std::string* driverpodtemplatename); - - // string executorPodTemplateName = 13; - void clear_executorpodtemplatename(); - static const int kExecutorPodTemplateNameFieldNumber = 13; - const ::std::string& executorpodtemplatename() const; - void set_executorpodtemplatename(const ::std::string& value); - #if LANG_CXX11 - void set_executorpodtemplatename(::std::string&& value); - #endif - void set_executorpodtemplatename(const char* value); - void set_executorpodtemplatename(const char* value, size_t size); - ::std::string* mutable_executorpodtemplatename(); - ::std::string* release_executorpodtemplatename(); - void set_allocated_executorpodtemplatename(::std::string* executorpodtemplatename); - // .google.protobuf.Struct databricksConf = 7; bool has_databricksconf() const; void clear_databricksconf(); @@ -520,45 +482,33 @@ class SparkJob final : ::google::protobuf::Struct* mutable_databricksconf(); void set_allocated_databricksconf(::google::protobuf::Struct* databricksconf); + // .flyteidl.plugins.RoleSpec driverSpec = 10; + bool has_driverspec() const; + void clear_driverspec(); + static const int kDriverSpecFieldNumber = 10; + const ::flyteidl::plugins::RoleSpec& driverspec() const; + ::flyteidl::plugins::RoleSpec* release_driverspec(); + ::flyteidl::plugins::RoleSpec* mutable_driverspec(); + void set_allocated_driverspec(::flyteidl::plugins::RoleSpec* driverspec); + + // .flyteidl.plugins.RoleSpec executorSpec = 11; + bool has_executorspec() const; + void clear_executorspec(); + static const int kExecutorSpecFieldNumber = 11; + const ::flyteidl::plugins::RoleSpec& executorspec() const; + ::flyteidl::plugins::RoleSpec* release_executorspec(); + ::flyteidl::plugins::RoleSpec* mutable_executorspec(); + void set_allocated_executorspec(::flyteidl::plugins::RoleSpec* executorspec); + // .flyteidl.plugins.SparkApplication.Type applicationType = 1; void clear_applicationtype(); static const int kApplicationTypeFieldNumber = 1; ::flyteidl::plugins::SparkApplication_Type applicationtype() const; void set_applicationtype(::flyteidl::plugins::SparkApplication_Type value); - // .flyteidl.core.K8sPod driverPod = 10; - bool has_driverpod() const; - void clear_driverpod(); - static const int kDriverPodFieldNumber = 10; - const ::flyteidl::core::K8sPod& driverpod() const; - ::flyteidl::core::K8sPod* release_driverpod(); - ::flyteidl::core::K8sPod* mutable_driverpod(); - void set_allocated_driverpod(::flyteidl::core::K8sPod* driverpod); - - // .flyteidl.core.K8sPod executorPod = 12; - bool has_executorpod() const; - void clear_executorpod(); - static const int kExecutorPodFieldNumber = 12; - const ::flyteidl::core::K8sPod& executorpod() const; - ::flyteidl::core::K8sPod* release_executorpod(); - ::flyteidl::core::K8sPod* mutable_executorpod(); - void set_allocated_executorpod(::flyteidl::core::K8sPod* executorpod); - - void clear_driverPodValue(); - DriverPodValueCase driverPodValue_case() const; 
- void clear_executorPodValue(); - ExecutorPodValueCase executorPodValue_case() const; // @@protoc_insertion_point(class_scope:flyteidl.plugins.SparkJob) private: class HasBitSetters; - void set_has_driverpod(); - void set_has_executorpod(); - - inline bool has_driverPodValue() const; - inline void clear_has_driverPodValue(); - - inline bool has_executorPodValue() const; - inline void clear_has_executorPodValue(); ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::MapField< @@ -578,21 +528,11 @@ class SparkJob final : ::google::protobuf::internal::ArenaStringPtr executorpath_; ::google::protobuf::internal::ArenaStringPtr databrickstoken_; ::google::protobuf::internal::ArenaStringPtr databricksinstance_; - ::google::protobuf::internal::ArenaStringPtr driverpodtemplatename_; - ::google::protobuf::internal::ArenaStringPtr executorpodtemplatename_; ::google::protobuf::Struct* databricksconf_; + ::flyteidl::plugins::RoleSpec* driverspec_; + ::flyteidl::plugins::RoleSpec* executorspec_; int applicationtype_; - union DriverPodValueUnion { - DriverPodValueUnion() {} - ::flyteidl::core::K8sPod* driverpod_; - } driverPodValue_; - union ExecutorPodValueUnion { - ExecutorPodValueUnion() {} - ::flyteidl::core::K8sPod* executorpod_; - } executorPodValue_; mutable ::google::protobuf::internal::CachedSize _cached_size_; - ::google::protobuf::uint32 _oneof_case_[2]; - friend struct ::TableStruct_flyteidl_2fplugins_2fspark_2eproto; }; // =================================================================== @@ -975,200 +915,96 @@ inline void SparkJob::set_allocated_databricksinstance(::std::string* databricks // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.databricksInstance) } -// .flyteidl.core.K8sPod driverPod = 10; -inline bool SparkJob::has_driverpod() const { - return driverPodValue_case() == kDriverPod; -} -inline void SparkJob::set_has_driverpod() { - _oneof_case_[0] = kDriverPod; -} -inline ::flyteidl::core::K8sPod* SparkJob::release_driverpod() { - // @@protoc_insertion_point(field_release:flyteidl.plugins.SparkJob.driverPod) - if (has_driverpod()) { - clear_has_driverPodValue(); - ::flyteidl::core::K8sPod* temp = driverPodValue_.driverpod_; - driverPodValue_.driverpod_ = nullptr; - return temp; - } else { - return nullptr; - } -} -inline const ::flyteidl::core::K8sPod& SparkJob::driverpod() const { - // @@protoc_insertion_point(field_get:flyteidl.plugins.SparkJob.driverPod) - return has_driverpod() - ? 
*driverPodValue_.driverpod_ - : *reinterpret_cast< ::flyteidl::core::K8sPod*>(&::flyteidl::core::_K8sPod_default_instance_); -} -inline ::flyteidl::core::K8sPod* SparkJob::mutable_driverpod() { - if (!has_driverpod()) { - clear_driverPodValue(); - set_has_driverpod(); - driverPodValue_.driverpod_ = CreateMaybeMessage< ::flyteidl::core::K8sPod >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:flyteidl.plugins.SparkJob.driverPod) - return driverPodValue_.driverpod_; -} - -// string driverPodTemplateName = 11; -inline void SparkJob::clear_driverpodtemplatename() { - driverpodtemplatename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& SparkJob::driverpodtemplatename() const { - // @@protoc_insertion_point(field_get:flyteidl.plugins.SparkJob.driverPodTemplateName) - return driverpodtemplatename_.GetNoArena(); +// .flyteidl.plugins.RoleSpec driverSpec = 10; +inline bool SparkJob::has_driverspec() const { + return this != internal_default_instance() && driverspec_ != nullptr; } -inline void SparkJob::set_driverpodtemplatename(const ::std::string& value) { - - driverpodtemplatename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.plugins.SparkJob.driverPodTemplateName) -} -#if LANG_CXX11 -inline void SparkJob::set_driverpodtemplatename(::std::string&& value) { - - driverpodtemplatename_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.plugins.SparkJob.driverPodTemplateName) +inline const ::flyteidl::plugins::RoleSpec& SparkJob::driverspec() const { + const ::flyteidl::plugins::RoleSpec* p = driverspec_; + // @@protoc_insertion_point(field_get:flyteidl.plugins.SparkJob.driverSpec) + return p != nullptr ? 
*p : *reinterpret_cast( + &::flyteidl::plugins::_RoleSpec_default_instance_); } -#endif -inline void SparkJob::set_driverpodtemplatename(const char* value) { - GOOGLE_DCHECK(value != nullptr); +inline ::flyteidl::plugins::RoleSpec* SparkJob::release_driverspec() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.SparkJob.driverSpec) - driverpodtemplatename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.plugins.SparkJob.driverPodTemplateName) -} -inline void SparkJob::set_driverpodtemplatename(const char* value, size_t size) { - - driverpodtemplatename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.plugins.SparkJob.driverPodTemplateName) -} -inline ::std::string* SparkJob::mutable_driverpodtemplatename() { - - // @@protoc_insertion_point(field_mutable:flyteidl.plugins.SparkJob.driverPodTemplateName) - return driverpodtemplatename_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + ::flyteidl::plugins::RoleSpec* temp = driverspec_; + driverspec_ = nullptr; + return temp; } -inline ::std::string* SparkJob::release_driverpodtemplatename() { - // @@protoc_insertion_point(field_release:flyteidl.plugins.SparkJob.driverPodTemplateName) +inline ::flyteidl::plugins::RoleSpec* SparkJob::mutable_driverspec() { - return driverpodtemplatename_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (driverspec_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::plugins::RoleSpec>(GetArenaNoVirtual()); + driverspec_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.SparkJob.driverSpec) + return driverspec_; } -inline void SparkJob::set_allocated_driverpodtemplatename(::std::string* driverpodtemplatename) { - if (driverpodtemplatename != nullptr) { +inline void SparkJob::set_allocated_driverspec(::flyteidl::plugins::RoleSpec* driverspec) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete reinterpret_cast< ::google::protobuf::MessageLite*>(driverspec_); + } + if (driverspec) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + driverspec = ::google::protobuf::internal::GetOwnedMessage( + message_arena, driverspec, submessage_arena); + } } else { } - driverpodtemplatename_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), driverpodtemplatename); - // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.driverPodTemplateName) + driverspec_ = driverspec; + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.driverSpec) } -// .flyteidl.core.K8sPod executorPod = 12; -inline bool SparkJob::has_executorpod() const { - return executorPodValue_case() == kExecutorPod; -} -inline void SparkJob::set_has_executorpod() { - _oneof_case_[1] = kExecutorPod; -} -inline ::flyteidl::core::K8sPod* SparkJob::release_executorpod() { - // @@protoc_insertion_point(field_release:flyteidl.plugins.SparkJob.executorPod) - if (has_executorpod()) { - clear_has_executorPodValue(); - ::flyteidl::core::K8sPod* temp = executorPodValue_.executorpod_; - executorPodValue_.executorpod_ = nullptr; - return temp; - } else { - return nullptr; - } +// .flyteidl.plugins.RoleSpec executorSpec = 11; +inline bool SparkJob::has_executorspec() const 
{ + return this != internal_default_instance() && executorspec_ != nullptr; } -inline const ::flyteidl::core::K8sPod& SparkJob::executorpod() const { - // @@protoc_insertion_point(field_get:flyteidl.plugins.SparkJob.executorPod) - return has_executorpod() - ? *executorPodValue_.executorpod_ - : *reinterpret_cast< ::flyteidl::core::K8sPod*>(&::flyteidl::core::_K8sPod_default_instance_); -} -inline ::flyteidl::core::K8sPod* SparkJob::mutable_executorpod() { - if (!has_executorpod()) { - clear_executorPodValue(); - set_has_executorpod(); - executorPodValue_.executorpod_ = CreateMaybeMessage< ::flyteidl::core::K8sPod >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:flyteidl.plugins.SparkJob.executorPod) - return executorPodValue_.executorpod_; -} - -// string executorPodTemplateName = 13; -inline void SparkJob::clear_executorpodtemplatename() { - executorpodtemplatename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +inline const ::flyteidl::plugins::RoleSpec& SparkJob::executorspec() const { + const ::flyteidl::plugins::RoleSpec* p = executorspec_; + // @@protoc_insertion_point(field_get:flyteidl.plugins.SparkJob.executorSpec) + return p != nullptr ? *p : *reinterpret_cast( + &::flyteidl::plugins::_RoleSpec_default_instance_); } -inline const ::std::string& SparkJob::executorpodtemplatename() const { - // @@protoc_insertion_point(field_get:flyteidl.plugins.SparkJob.executorPodTemplateName) - return executorpodtemplatename_.GetNoArena(); -} -inline void SparkJob::set_executorpodtemplatename(const ::std::string& value) { - - executorpodtemplatename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.plugins.SparkJob.executorPodTemplateName) -} -#if LANG_CXX11 -inline void SparkJob::set_executorpodtemplatename(::std::string&& value) { - - executorpodtemplatename_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.plugins.SparkJob.executorPodTemplateName) -} -#endif -inline void SparkJob::set_executorpodtemplatename(const char* value) { - GOOGLE_DCHECK(value != nullptr); +inline ::flyteidl::plugins::RoleSpec* SparkJob::release_executorspec() { + // @@protoc_insertion_point(field_release:flyteidl.plugins.SparkJob.executorSpec) - executorpodtemplatename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.plugins.SparkJob.executorPodTemplateName) -} -inline void SparkJob::set_executorpodtemplatename(const char* value, size_t size) { - - executorpodtemplatename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.plugins.SparkJob.executorPodTemplateName) -} -inline ::std::string* SparkJob::mutable_executorpodtemplatename() { - - // @@protoc_insertion_point(field_mutable:flyteidl.plugins.SparkJob.executorPodTemplateName) - return executorpodtemplatename_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + ::flyteidl::plugins::RoleSpec* temp = executorspec_; + executorspec_ = nullptr; + return temp; } -inline ::std::string* SparkJob::release_executorpodtemplatename() { - // @@protoc_insertion_point(field_release:flyteidl.plugins.SparkJob.executorPodTemplateName) +inline ::flyteidl::plugins::RoleSpec* 
SparkJob::mutable_executorspec() { - return executorpodtemplatename_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (executorspec_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::plugins::RoleSpec>(GetArenaNoVirtual()); + executorspec_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.plugins.SparkJob.executorSpec) + return executorspec_; } -inline void SparkJob::set_allocated_executorpodtemplatename(::std::string* executorpodtemplatename) { - if (executorpodtemplatename != nullptr) { +inline void SparkJob::set_allocated_executorspec(::flyteidl::plugins::RoleSpec* executorspec) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete reinterpret_cast< ::google::protobuf::MessageLite*>(executorspec_); + } + if (executorspec) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + executorspec = ::google::protobuf::internal::GetOwnedMessage( + message_arena, executorspec, submessage_arena); + } } else { } - executorpodtemplatename_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), executorpodtemplatename); - // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.executorPodTemplateName) + executorspec_ = executorspec; + // @@protoc_insertion_point(field_set_allocated:flyteidl.plugins.SparkJob.executorSpec) } -inline bool SparkJob::has_driverPodValue() const { - return driverPodValue_case() != DRIVERPODVALUE_NOT_SET; -} -inline void SparkJob::clear_has_driverPodValue() { - _oneof_case_[0] = DRIVERPODVALUE_NOT_SET; -} -inline bool SparkJob::has_executorPodValue() const { - return executorPodValue_case() != EXECUTORPODVALUE_NOT_SET; -} -inline void SparkJob::clear_has_executorPodValue() { - _oneof_case_[1] = EXECUTORPODVALUE_NOT_SET; -} -inline SparkJob::DriverPodValueCase SparkJob::driverPodValue_case() const { - return SparkJob::DriverPodValueCase(_oneof_case_[0]); -} -inline SparkJob::ExecutorPodValueCase SparkJob::executorPodValue_case() const { - return SparkJob::ExecutorPodValueCase(_oneof_case_[1]); -} #ifdef __GNUC__ #pragma GCC diagnostic pop #endif // __GNUC__ diff --git a/flyteidl/gen/pb-go/flyteidl/plugins/common.pb.go b/flyteidl/gen/pb-go/flyteidl/plugins/common.pb.go new file mode 100644 index 0000000000..c7c899f80c --- /dev/null +++ b/flyteidl/gen/pb-go/flyteidl/plugins/common.pb.go @@ -0,0 +1,128 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: flyteidl/plugins/common.proto + +package plugins + +import ( + fmt "fmt" + core "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +// Used in place of the pod template references in core.TaskTemplate and core.TaskMetdata. This allows +// specifying pod configuration on a per role basis. +type RoleSpec struct { + // The pod spec and metadata to be used as the base configuration when creating a Pod for this role. 
+ // +optional + // + // Types that are valid to be assigned to PodValue: + // *RoleSpec_Pod + PodValue isRoleSpec_PodValue `protobuf_oneof:"pod_value"` + // Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating a + // Pod for this role. If this value is set, the specified PodTemplate will be used instead of, but applied + // identically as, the default PodTemplate configured in FlytePropeller. + // +optional + PodTemplateName string `protobuf:"bytes,2,opt,name=pod_template_name,json=podTemplateName,proto3" json:"pod_template_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RoleSpec) Reset() { *m = RoleSpec{} } +func (m *RoleSpec) String() string { return proto.CompactTextString(m) } +func (*RoleSpec) ProtoMessage() {} +func (*RoleSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_09d1ab4fb5fe7faf, []int{0} +} + +func (m *RoleSpec) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RoleSpec.Unmarshal(m, b) +} +func (m *RoleSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RoleSpec.Marshal(b, m, deterministic) +} +func (m *RoleSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_RoleSpec.Merge(m, src) +} +func (m *RoleSpec) XXX_Size() int { + return xxx_messageInfo_RoleSpec.Size(m) +} +func (m *RoleSpec) XXX_DiscardUnknown() { + xxx_messageInfo_RoleSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_RoleSpec proto.InternalMessageInfo + +type isRoleSpec_PodValue interface { + isRoleSpec_PodValue() +} + +type RoleSpec_Pod struct { + Pod *core.K8SPod `protobuf:"bytes,1,opt,name=pod,proto3,oneof"` +} + +func (*RoleSpec_Pod) isRoleSpec_PodValue() {} + +func (m *RoleSpec) GetPodValue() isRoleSpec_PodValue { + if m != nil { + return m.PodValue + } + return nil +} + +func (m *RoleSpec) GetPod() *core.K8SPod { + if x, ok := m.GetPodValue().(*RoleSpec_Pod); ok { + return x.Pod + } + return nil +} + +func (m *RoleSpec) GetPodTemplateName() string { + if m != nil { + return m.PodTemplateName + } + return "" +} + +// XXX_OneofWrappers is for the internal use of the proto package. 
+func (*RoleSpec) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*RoleSpec_Pod)(nil), + } +} + +func init() { + proto.RegisterType((*RoleSpec)(nil), "flyteidl.plugins.RoleSpec") +} + +func init() { proto.RegisterFile("flyteidl/plugins/common.proto", fileDescriptor_09d1ab4fb5fe7faf) } + +var fileDescriptor_09d1ab4fb5fe7faf = []byte{ + // 214 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x8f, 0x3f, 0x4b, 0xc0, 0x30, + 0x10, 0xc5, 0xad, 0x82, 0xd8, 0x74, 0x50, 0x0b, 0x42, 0x15, 0x84, 0xe2, 0x54, 0x05, 0x13, 0xd0, + 0xc5, 0x45, 0x84, 0x4e, 0x82, 0x20, 0x52, 0x9d, 0x5c, 0x4a, 0x9a, 0x9c, 0xb1, 0x98, 0xe4, 0x8e, + 0x26, 0x15, 0xfc, 0xf6, 0xd2, 0x3f, 0x76, 0x70, 0x3b, 0xee, 0xfd, 0xde, 0xdd, 0x7b, 0xec, 0xfc, + 0xc3, 0xfe, 0x44, 0xe8, 0xb5, 0x15, 0x64, 0x47, 0xd3, 0xfb, 0x20, 0x14, 0x3a, 0x87, 0x9e, 0xd3, + 0x80, 0x11, 0xf3, 0xa3, 0x3f, 0x99, 0xaf, 0xf2, 0xd9, 0xe9, 0x66, 0x50, 0x38, 0x80, 0x88, 0x32, + 0x7c, 0x85, 0x05, 0xbe, 0xf0, 0xec, 0xa0, 0x41, 0x0b, 0xaf, 0x04, 0x2a, 0xbf, 0x64, 0x7b, 0x84, + 0xba, 0x48, 0xca, 0xa4, 0xca, 0x6e, 0x4e, 0xf8, 0x76, 0x66, 0x32, 0xf1, 0xa7, 0xbb, 0xf0, 0x82, + 0xfa, 0x71, 0xa7, 0x99, 0x98, 0xfc, 0x8a, 0x1d, 0x13, 0xea, 0x36, 0x82, 0x23, 0x2b, 0x23, 0xb4, + 0x5e, 0x3a, 0x28, 0x76, 0xcb, 0xa4, 0x4a, 0x9b, 0x43, 0x42, 0xfd, 0xb6, 0xee, 0x9f, 0xa5, 0x83, + 0x3a, 0x63, 0xe9, 0xc4, 0x7e, 0x4b, 0x3b, 0x42, 0xfd, 0xf0, 0x7e, 0x6f, 0xfa, 0xf8, 0x39, 0x76, + 0x5c, 0xa1, 0x13, 0xf3, 0x0b, 0x1c, 0xcc, 0x32, 0x88, 0x2d, 0xa6, 0x01, 0x2f, 0xa8, 0xbb, 0x36, + 0x28, 0xfe, 0x57, 0xed, 0xf6, 0xe7, 0xdc, 0xb7, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x91, 0x48, + 0xdd, 0x4a, 0x05, 0x01, 0x00, 0x00, +} diff --git a/flyteidl/gen/pb-go/flyteidl/plugins/common.pb.validate.go b/flyteidl/gen/pb-go/flyteidl/plugins/common.pb.validate.go new file mode 100644 index 0000000000..3799b0782b --- /dev/null +++ b/flyteidl/gen/pb-go/flyteidl/plugins/common.pb.validate.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-validate. DO NOT EDIT. +// source: flyteidl/plugins/common.proto + +package plugins + +import ( + "bytes" + "errors" + "fmt" + "net" + "net/mail" + "net/url" + "regexp" + "strings" + "time" + "unicode/utf8" + + "github.com/golang/protobuf/ptypes" +) + +// ensure the imports are used +var ( + _ = bytes.MinRead + _ = errors.New("") + _ = fmt.Print + _ = utf8.UTFMax + _ = (*regexp.Regexp)(nil) + _ = (*strings.Reader)(nil) + _ = net.IPv4len + _ = time.Duration(0) + _ = (*url.URL)(nil) + _ = (*mail.Address)(nil) + _ = ptypes.DynamicAny{} +) + +// define the regex for a UUID once up-front +var _common_uuidPattern = regexp.MustCompile("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$") + +// Validate checks the field values on RoleSpec with the rules defined in the +// proto definition for this message. If any rules are violated, an error is returned. +func (m *RoleSpec) Validate() error { + if m == nil { + return nil + } + + // no validation rules for PodTemplateName + + switch m.PodValue.(type) { + + case *RoleSpec_Pod: + + if v, ok := interface{}(m.GetPod()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return RoleSpecValidationError{ + field: "Pod", + reason: "embedded message failed validation", + cause: err, + } + } + } + + } + + return nil +} + +// RoleSpecValidationError is the validation error returned by +// RoleSpec.Validate if the designated constraints aren't met. 
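A note for readers of the generated Go API above: RoleSpec carries an inline pod through the pod_value oneof (wrapped in RoleSpec_Pod), while pod_template_name is an ordinary string field outside the oneof. The following is a minimal sketch, not part of this patch, of how a caller might populate both shapes; it assumes only the pb-go import paths shown here, and the template name is made up.

package main

import (
	"fmt"

	core "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"
	plugins "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"
)

func main() {
	// Inline pod: the oneof is set through the generated RoleSpec_Pod wrapper.
	inline := &plugins.RoleSpec{
		PodValue: &plugins.RoleSpec_Pod{Pod: &core.K8SPod{}},
	}

	// PodTemplate reference: pod_template_name is a plain field outside the
	// oneof ("spark-driver" is a made-up resource name).
	byName := &plugins.RoleSpec{PodTemplateName: "spark-driver"}

	fmt.Println(inline.GetPod() != nil, byName.GetPodTemplateName())
}

GetPod() returns nil unless the oneof holds a RoleSpec_Pod, which is also the only branch the generated Validate() above recurses into; PodTemplateName has no validation rules.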
+type RoleSpecValidationError struct { + field string + reason string + cause error + key bool +} + +// Field function returns field value. +func (e RoleSpecValidationError) Field() string { return e.field } + +// Reason function returns reason value. +func (e RoleSpecValidationError) Reason() string { return e.reason } + +// Cause function returns cause value. +func (e RoleSpecValidationError) Cause() error { return e.cause } + +// Key function returns key value. +func (e RoleSpecValidationError) Key() bool { return e.key } + +// ErrorName returns error name. +func (e RoleSpecValidationError) ErrorName() string { return "RoleSpecValidationError" } + +// Error satisfies the builtin error interface +func (e RoleSpecValidationError) Error() string { + cause := "" + if e.cause != nil { + cause = fmt.Sprintf(" | caused by: %v", e.cause) + } + + key := "" + if e.key { + key = "key for " + } + + return fmt.Sprintf( + "invalid %sRoleSpec.%s: %s%s", + key, + e.field, + e.reason, + cause) +} + +var _ error = RoleSpecValidationError{} + +var _ interface { + Field() string + Reason() string + Key() bool + Cause() error + ErrorName() string +} = RoleSpecValidationError{} diff --git a/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.go b/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.go index f755336b82..40a47452a8 100644 --- a/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.go @@ -5,7 +5,6 @@ package plugins import ( fmt "fmt" - core "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" proto "github.com/golang/protobuf/proto" _struct "github.com/golang/protobuf/ptypes/struct" math "math" @@ -101,31 +100,13 @@ type SparkJob struct { // Domain name of your deployment. Use the form .cloud.databricks.com. // This instance name can be set in either flytepropeller or flytekit. DatabricksInstance string `protobuf:"bytes,9,opt,name=databricksInstance,proto3" json:"databricksInstance,omitempty"` - // The pod spec and metadata to be used as the base configuration when creating the driver Pod for this task. - // +optional - // - // Types that are valid to be assigned to DriverPodValue: - // *SparkJob_DriverPod - DriverPodValue isSparkJob_DriverPodValue `protobuf_oneof:"driverPodValue"` - // Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the - // driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied - // identically as, the default PodTemplate configured in FlytePropeller. - // +optional - DriverPodTemplateName string `protobuf:"bytes,11,opt,name=driverPodTemplateName,proto3" json:"driverPodTemplateName,omitempty"` - // The pod spec and metadata to be used as the base configuration when creating the executor Pods for this task. - // +optional - // - // Types that are valid to be assigned to ExecutorPodValue: - // *SparkJob_ExecutorPod - ExecutorPodValue isSparkJob_ExecutorPodValue `protobuf_oneof:"executorPodValue"` - // Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the - // executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied - // identically as, the default PodTemplate configured in FlytePropeller. 
- // +optional - ExecutorPodTemplateName string `protobuf:"bytes,13,opt,name=executorPodTemplateName,proto3" json:"executorPodTemplateName,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + // The driver spec, used in place of the task's pod template + DriverSpec *RoleSpec `protobuf:"bytes,10,opt,name=driverSpec,proto3" json:"driverSpec,omitempty"` + // The executor spec, used in place of the task's pod template + ExecutorSpec *RoleSpec `protobuf:"bytes,11,opt,name=executorSpec,proto3" json:"executorSpec,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *SparkJob) Reset() { *m = SparkJob{} } @@ -216,76 +197,20 @@ func (m *SparkJob) GetDatabricksInstance() string { return "" } -type isSparkJob_DriverPodValue interface { - isSparkJob_DriverPodValue() -} - -type SparkJob_DriverPod struct { - DriverPod *core.K8SPod `protobuf:"bytes,10,opt,name=driverPod,proto3,oneof"` -} - -func (*SparkJob_DriverPod) isSparkJob_DriverPodValue() {} - -func (m *SparkJob) GetDriverPodValue() isSparkJob_DriverPodValue { +func (m *SparkJob) GetDriverSpec() *RoleSpec { if m != nil { - return m.DriverPodValue - } - return nil -} - -func (m *SparkJob) GetDriverPod() *core.K8SPod { - if x, ok := m.GetDriverPodValue().(*SparkJob_DriverPod); ok { - return x.DriverPod + return m.DriverSpec } return nil } -func (m *SparkJob) GetDriverPodTemplateName() string { +func (m *SparkJob) GetExecutorSpec() *RoleSpec { if m != nil { - return m.DriverPodTemplateName - } - return "" -} - -type isSparkJob_ExecutorPodValue interface { - isSparkJob_ExecutorPodValue() -} - -type SparkJob_ExecutorPod struct { - ExecutorPod *core.K8SPod `protobuf:"bytes,12,opt,name=executorPod,proto3,oneof"` -} - -func (*SparkJob_ExecutorPod) isSparkJob_ExecutorPodValue() {} - -func (m *SparkJob) GetExecutorPodValue() isSparkJob_ExecutorPodValue { - if m != nil { - return m.ExecutorPodValue - } - return nil -} - -func (m *SparkJob) GetExecutorPod() *core.K8SPod { - if x, ok := m.GetExecutorPodValue().(*SparkJob_ExecutorPod); ok { - return x.ExecutorPod + return m.ExecutorSpec } return nil } -func (m *SparkJob) GetExecutorPodTemplateName() string { - if m != nil { - return m.ExecutorPodTemplateName - } - return "" -} - -// XXX_OneofWrappers is for the internal use of the proto package. 
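On SparkJob itself, the removed driverPod/driverPodTemplateName and executorPod/executorPodTemplateName fields give way to one RoleSpec per role. The sketch below, again outside this patch and under the same import-path assumptions (the template name is illustrative), shows what client code might look like against the new shape; a single Validate() call covers both role specs because the regenerated spark.pb.validate.go further down recurses into them.

package main

import (
	"log"

	core "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"
	plugins "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins"
)

func main() {
	job := &plugins.SparkJob{
		// Driver role resolved from an existing PodTemplate (name is illustrative).
		DriverSpec: &plugins.RoleSpec{PodTemplateName: "spark-driver-template"},
		// Executor role configured inline via the pod oneof.
		ExecutorSpec: &plugins.RoleSpec{
			PodValue: &plugins.RoleSpec_Pod{Pod: &core.K8SPod{}},
		},
	}

	// Validate recurses into both DriverSpec and ExecutorSpec.
	if err := job.Validate(); err != nil {
		log.Fatal(err)
	}
}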
-func (*SparkJob) XXX_OneofWrappers() []interface{} { - return []interface{}{ - (*SparkJob_DriverPod)(nil), - (*SparkJob_ExecutorPod)(nil), - } -} - func init() { proto.RegisterEnum("flyteidl.plugins.SparkApplication_Type", SparkApplication_Type_name, SparkApplication_Type_value) proto.RegisterType((*SparkApplication)(nil), "flyteidl.plugins.SparkApplication") @@ -297,40 +222,36 @@ func init() { func init() { proto.RegisterFile("flyteidl/plugins/spark.proto", fileDescriptor_ca8a069b9820144a) } var fileDescriptor_ca8a069b9820144a = []byte{ - // 549 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x51, 0x6f, 0xd2, 0x50, - 0x14, 0x5e, 0xc7, 0xd8, 0xd6, 0xc3, 0x64, 0xcd, 0xd1, 0x65, 0x95, 0xf0, 0x40, 0x78, 0x11, 0x4d, - 0xbc, 0x35, 0xa8, 0x09, 0x1a, 0x97, 0x05, 0x88, 0x8a, 0x68, 0x26, 0x16, 0xb2, 0x44, 0xdf, 0x6e, - 0xdb, 0x4b, 0x69, 0x28, 0xbd, 0x4d, 0x7b, 0xbb, 0xc8, 0x1f, 0xf0, 0x77, 0x9b, 0xde, 0x4a, 0x0b, - 0x0d, 0x5b, 0xe2, 0xdb, 0x3d, 0xdf, 0xf9, 0xce, 0xf7, 0x9d, 0x9c, 0x73, 0x72, 0xa1, 0x39, 0xf7, - 0xd7, 0x82, 0x79, 0x8e, 0x6f, 0x84, 0x7e, 0xe2, 0x7a, 0x41, 0x6c, 0xc4, 0x21, 0x8d, 0x96, 0x24, - 0x8c, 0xb8, 0xe0, 0xa8, 0x6d, 0xb2, 0xe4, 0x5f, 0xb6, 0xf1, 0x34, 0xe7, 0xdb, 0x3c, 0x62, 0x86, - 0xa0, 0xf1, 0x32, 0xce, 0xc8, 0x8d, 0xa6, 0xcb, 0xb9, 0xeb, 0x33, 0x43, 0x46, 0x56, 0x32, 0x37, - 0x62, 0x11, 0x25, 0xb6, 0xc8, 0xb2, 0xed, 0x01, 0x68, 0xd3, 0x54, 0xb9, 0x1f, 0x86, 0xbe, 0x67, - 0x53, 0xe1, 0xf1, 0xa0, 0x4d, 0xe0, 0x68, 0xb6, 0x0e, 0x19, 0x02, 0x1c, 0x4f, 0x7e, 0xce, 0x46, - 0xdf, 0x6f, 0xb4, 0x03, 0x3c, 0x85, 0xa3, 0x71, 0xff, 0xb6, 0xaf, 0x29, 0xa8, 0x42, 0x75, 0x3a, - 0xec, 0x7f, 0xeb, 0x6b, 0x87, 0x58, 0x05, 0xc5, 0xd4, 0x2a, 0xed, 0x3f, 0x27, 0x70, 0x2a, 0x45, - 0xc6, 0xdc, 0xc2, 0x1f, 0x70, 0x4e, 0x0b, 0xad, 0x54, 0x47, 0x57, 0x5a, 0x4a, 0xa7, 0xde, 0x7d, - 0x46, 0xca, 0x5d, 0x93, 0xb2, 0x33, 0x49, 0xe9, 0x66, 0xb9, 0x1e, 0x5f, 0xc1, 0xe3, 0x15, 0xf5, - 0x82, 0x2d, 0xe2, 0x27, 0xcf, 0x67, 0xfa, 0x61, 0x4b, 0xe9, 0xa8, 0xe6, 0xbe, 0x14, 0x36, 0x41, - 0x4d, 0xe1, 0xa1, 0x4f, 0xe3, 0x58, 0xaf, 0x48, 0x5e, 0x01, 0xe0, 0x67, 0x50, 0xe5, 0x34, 0x87, - 0x3c, 0x98, 0xeb, 0x47, 0xad, 0x4a, 0xa7, 0xd6, 0x7d, 0x7e, 0x4f, 0x73, 0x63, 0x6e, 0x65, 0x8f, - 0x94, 0xfb, 0x31, 0x10, 0xd1, 0xda, 0x2c, 0x6a, 0x71, 0x0c, 0xb0, 0xa0, 0x0e, 0xe7, 0xa1, 0x54, - 0xaa, 0x4a, 0xa5, 0x17, 0x0f, 0x28, 0x8d, 0x72, 0x72, 0x26, 0xb5, 0x55, 0x8d, 0x6d, 0x38, 0x63, - 0xbf, 0x99, 0x9d, 0x08, 0x1e, 0x4d, 0xa8, 0x58, 0xe8, 0xc7, 0xb2, 0xeb, 0x1d, 0x0c, 0xaf, 0xa1, - 0xee, 0x50, 0x41, 0xad, 0xc8, 0xb3, 0x97, 0xb1, 0xf4, 0x3c, 0x69, 0x29, 0x9d, 0x5a, 0xf7, 0x92, - 0x64, 0x3b, 0x26, 0x9b, 0x1d, 0x93, 0xa9, 0xdc, 0xb1, 0x59, 0xa2, 0x63, 0x07, 0xce, 0x0b, 0x64, - 0xc6, 0x97, 0x2c, 0xd0, 0x4f, 0xa5, 0x4f, 0x19, 0x46, 0x02, 0x58, 0x40, 0x5f, 0x82, 0x58, 0xd0, - 0xc0, 0x66, 0xba, 0x2a, 0xc9, 0x7b, 0x32, 0xf8, 0x16, 0x54, 0x27, 0xf2, 0xee, 0x58, 0x34, 0xe1, - 0x8e, 0x0e, 0xb2, 0xab, 0x8b, 0x62, 0x12, 0xe9, 0x51, 0x92, 0xaf, 0xbd, 0x78, 0xc2, 0x9d, 0xd1, - 0x81, 0x59, 0x30, 0xf1, 0x0d, 0x5c, 0xe4, 0xc1, 0x8c, 0xad, 0x42, 0x9f, 0x0a, 0x76, 0x43, 0x57, - 0x4c, 0xaf, 0x49, 0xa7, 0xfd, 0x49, 0x7c, 0x07, 0xb5, 0x7c, 0x2e, 0xdc, 0xd1, 0xcf, 0x1e, 0xb2, - 0x53, 0xcc, 0x6d, 0x2e, 0xf6, 0xe0, 0x72, 0x2b, 0xdc, 0xb1, 0x7c, 0x24, 0x2d, 0xef, 0x4b, 0x37, - 0x3e, 0x40, 0x7d, 0xf7, 0x12, 0x50, 0x83, 0xca, 0x92, 0xad, 0xe5, 0x79, 0xab, 0x66, 0xfa, 0xc4, - 0x27, 0x50, 0xbd, 0xa3, 0x7e, 0xb2, 0xb9, 0xcd, 0x2c, 0x78, 0x7f, 0xd8, 0x53, 0x1a, 0x57, 0x70, - 0x5e, 0xda, 0xfe, 
0xff, 0x94, 0x0f, 0x34, 0xa8, 0xe7, 0xa3, 0xb8, 0x4d, 0xd1, 0x01, 0x82, 0xb6, - 0xd5, 0x69, 0x86, 0x5d, 0xff, 0xba, 0x72, 0x3d, 0xb1, 0x48, 0x2c, 0x62, 0xf3, 0x95, 0x21, 0xc7, - 0xc1, 0x23, 0x37, 0x7b, 0x18, 0xf9, 0x0f, 0xe1, 0xb2, 0xc0, 0x08, 0xad, 0x97, 0x2e, 0x37, 0xca, - 0x9f, 0x8c, 0x75, 0x2c, 0x0f, 0xe8, 0xf5, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe3, 0xaa, 0xe1, - 0x4a, 0x7f, 0x04, 0x00, 0x00, + // 490 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0x5d, 0x8b, 0xd3, 0x40, + 0x14, 0x35, 0xfd, 0xb2, 0xb9, 0x95, 0x36, 0x5c, 0x05, 0x43, 0xa8, 0x50, 0xfa, 0x62, 0x14, 0x9c, + 0x48, 0x7d, 0x91, 0xc5, 0x75, 0xc9, 0x16, 0x75, 0x2d, 0xa2, 0x6b, 0x5a, 0x04, 0x7d, 0x9b, 0xa4, + 0xd3, 0x34, 0x34, 0x9d, 0x09, 0xc9, 0x64, 0xb1, 0x7f, 0xdd, 0x27, 0xc9, 0xc4, 0x6e, 0xda, 0xec, + 0xaa, 0xec, 0xdb, 0xcd, 0xb9, 0xe7, 0x9c, 0x7b, 0x33, 0x67, 0x06, 0x86, 0xab, 0x78, 0x27, 0x59, + 0xb4, 0x8c, 0x9d, 0x24, 0xce, 0xc3, 0x88, 0x67, 0x4e, 0x96, 0xd0, 0x74, 0x43, 0x92, 0x54, 0x48, + 0x81, 0xc6, 0xbe, 0x4b, 0xfe, 0x74, 0xad, 0x27, 0x37, 0xf8, 0x81, 0xd8, 0x6e, 0x05, 0x2f, 0x05, + 0xd6, 0x30, 0x14, 0x22, 0x8c, 0x99, 0xa3, 0xbe, 0xfc, 0x7c, 0xe5, 0x64, 0x32, 0xcd, 0x03, 0x59, + 0x76, 0xc7, 0xe7, 0x60, 0xcc, 0x0b, 0x77, 0x37, 0x49, 0xe2, 0x28, 0xa0, 0x32, 0x12, 0x7c, 0x4c, + 0xa0, 0xb5, 0xd8, 0x25, 0x0c, 0x01, 0x3a, 0x97, 0xdf, 0x17, 0x17, 0x5f, 0x3e, 0x1b, 0xf7, 0xb0, + 0x0b, 0xad, 0x99, 0xfb, 0xcd, 0x35, 0x34, 0xd4, 0xa1, 0x3d, 0x9f, 0xba, 0x9f, 0x5c, 0xa3, 0x81, + 0x6d, 0xd0, 0x3c, 0xa3, 0x39, 0xfe, 0xd5, 0x86, 0xae, 0x32, 0x99, 0x09, 0x1f, 0xbf, 0xc2, 0x80, + 0x56, 0x5e, 0x85, 0x8f, 0xa9, 0x8d, 0x34, 0xbb, 0x3f, 0x79, 0x4a, 0xea, 0x9b, 0x93, 0xfa, 0x64, + 0x52, 0xd0, 0xbd, 0xba, 0x1e, 0x5f, 0xc2, 0xc3, 0x2d, 0x8d, 0xf8, 0x01, 0xf1, 0x7d, 0x14, 0x33, + 0xb3, 0x31, 0xd2, 0x6c, 0xdd, 0xbb, 0xad, 0x85, 0x43, 0xd0, 0x0b, 0x78, 0x1a, 0xd3, 0x2c, 0x33, + 0x9b, 0x8a, 0x57, 0x01, 0xf8, 0x01, 0x74, 0x75, 0xa2, 0x53, 0xc1, 0x57, 0x66, 0x6b, 0xd4, 0xb4, + 0x7b, 0x93, 0x67, 0x7f, 0x59, 0x6e, 0x26, 0xfc, 0xb2, 0x28, 0xb8, 0xef, 0xb8, 0x4c, 0x77, 0x5e, + 0xa5, 0xc5, 0x19, 0xc0, 0x9a, 0x2e, 0x85, 0x48, 0x94, 0x53, 0x5b, 0x39, 0x3d, 0xff, 0x87, 0xd3, + 0xc5, 0x35, 0xb9, 0xb4, 0x3a, 0x50, 0xe3, 0x18, 0x1e, 0xb0, 0x9f, 0x2c, 0xc8, 0xa5, 0x48, 0x2f, + 0xa9, 0x5c, 0x9b, 0x1d, 0xb5, 0xf5, 0x11, 0x86, 0x67, 0xd0, 0x5f, 0x52, 0x49, 0xfd, 0x34, 0x0a, + 0x36, 0x99, 0x9a, 0x79, 0x7f, 0xa4, 0xd9, 0xbd, 0xc9, 0x63, 0x52, 0x66, 0x4c, 0xf6, 0x19, 0x93, + 0xb9, 0xca, 0xd8, 0xab, 0xd1, 0xd1, 0x86, 0x41, 0x85, 0x2c, 0xc4, 0x86, 0x71, 0xb3, 0xab, 0xe6, + 0xd4, 0x61, 0x24, 0x80, 0x15, 0xf4, 0x91, 0x67, 0x92, 0xf2, 0x80, 0x99, 0xba, 0x22, 0xdf, 0xd2, + 0xc1, 0x13, 0x80, 0x65, 0x1a, 0x5d, 0xb1, 0x74, 0x9e, 0xb0, 0xc0, 0x04, 0xb5, 0x96, 0x75, 0xf3, + 0x28, 0x3c, 0x11, 0xb3, 0x82, 0xe1, 0x1d, 0xb0, 0xf1, 0x6d, 0xf5, 0xeb, 0x4a, 0xdd, 0xfb, 0xaf, + 0xfa, 0x88, 0x6f, 0xbd, 0x81, 0xfe, 0x71, 0x46, 0x68, 0x40, 0x73, 0xc3, 0x76, 0xea, 0xe2, 0xe9, + 0x5e, 0x51, 0xe2, 0x23, 0x68, 0x5f, 0xd1, 0x38, 0xdf, 0xdf, 0x9a, 0xf2, 0xe3, 0xa4, 0xf1, 0x5a, + 0xb3, 0x4e, 0x61, 0x50, 0xcb, 0xe5, 0x2e, 0xf2, 0xf3, 0xb3, 0x1f, 0xa7, 0x61, 0x24, 0xd7, 0xb9, + 0x4f, 0x02, 0xb1, 0x75, 0xd4, 0xca, 0x22, 0x0d, 0xcb, 0xc2, 0xb9, 0x7e, 0x99, 0x21, 0xe3, 0x4e, + 0xe2, 0xbf, 0x08, 0x85, 0x53, 0x7f, 0xac, 0x7e, 0x47, 0x85, 0xf6, 0xea, 0x77, 0x00, 0x00, 0x00, + 0xff, 0xff, 0x6e, 0x42, 0x28, 0x27, 0xf7, 0x03, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.validate.go 
b/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.validate.go index 1e7d7c5f71..5baee12c37 100644 --- a/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.validate.go +++ b/flyteidl/gen/pb-go/flyteidl/plugins/spark.pb.validate.go @@ -134,40 +134,24 @@ func (m *SparkJob) Validate() error { // no validation rules for DatabricksInstance - // no validation rules for DriverPodTemplateName - - // no validation rules for ExecutorPodTemplateName - - switch m.DriverPodValue.(type) { - - case *SparkJob_DriverPod: - - if v, ok := interface{}(m.GetDriverPod()).(interface{ Validate() error }); ok { - if err := v.Validate(); err != nil { - return SparkJobValidationError{ - field: "DriverPod", - reason: "embedded message failed validation", - cause: err, - } + if v, ok := interface{}(m.GetDriverSpec()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return SparkJobValidationError{ + field: "DriverSpec", + reason: "embedded message failed validation", + cause: err, } } - } - switch m.ExecutorPodValue.(type) { - - case *SparkJob_ExecutorPod: - - if v, ok := interface{}(m.GetExecutorPod()).(interface{ Validate() error }); ok { - if err := v.Validate(); err != nil { - return SparkJobValidationError{ - field: "ExecutorPod", - reason: "embedded message failed validation", - cause: err, - } + if v, ok := interface{}(m.GetExecutorSpec()).(interface{ Validate() error }); ok { + if err := v.Validate(); err != nil { + return SparkJobValidationError{ + field: "ExecutorSpec", + reason: "embedded message failed validation", + cause: err, } } - } return nil diff --git a/flyteidl/gen/pb-java/flyteidl/plugins/Common.java b/flyteidl/gen/pb-java/flyteidl/plugins/Common.java new file mode 100644 index 0000000000..45a66ada9f --- /dev/null +++ b/flyteidl/gen/pb-java/flyteidl/plugins/Common.java @@ -0,0 +1,968 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: flyteidl/plugins/common.proto + +package flyteidl.plugins; + +public final class Common { + private Common() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + public interface RoleSpecOrBuilder extends + // @@protoc_insertion_point(interface_extends:flyteidl.plugins.RoleSpec) + com.google.protobuf.MessageOrBuilder { + + /** + * .flyteidl.core.K8sPod pod = 1; + */ + boolean hasPod(); + /** + * .flyteidl.core.K8sPod pod = 1; + */ + flyteidl.core.Tasks.K8sPod getPod(); + /** + * .flyteidl.core.K8sPod pod = 1; + */ + flyteidl.core.Tasks.K8sPodOrBuilder getPodOrBuilder(); + + /** + *
+     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating a
+     * Pod for this role. If this value is set, the specified PodTemplate will be used instead of, but applied
+     * identically as, the default PodTemplate configured in FlytePropeller.
+     * +optional
+     * 
+ * + * string pod_template_name = 2; + */ + java.lang.String getPodTemplateName(); + /** + *
+     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating a
+     * Pod for this role. If this value is set, the specified PodTemplate will be used instead of, but applied
+     * identically as, the default PodTemplate configured in FlytePropeller.
+     * +optional
+     * 
+ * + * string pod_template_name = 2; + */ + com.google.protobuf.ByteString + getPodTemplateNameBytes(); + + public flyteidl.plugins.Common.RoleSpec.PodValueCase getPodValueCase(); + } + /** + *
+   * Used in place of the pod template references in core.TaskTemplate and core.TaskMetadata. This allows
+   * specifying pod configuration on a per role basis.
+   * 
+ * + * Protobuf type {@code flyteidl.plugins.RoleSpec} + */ + public static final class RoleSpec extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:flyteidl.plugins.RoleSpec) + RoleSpecOrBuilder { + private static final long serialVersionUID = 0L; + // Use RoleSpec.newBuilder() to construct. + private RoleSpec(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private RoleSpec() { + podTemplateName_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RoleSpec( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + flyteidl.core.Tasks.K8sPod.Builder subBuilder = null; + if (podValueCase_ == 1) { + subBuilder = ((flyteidl.core.Tasks.K8sPod) podValue_).toBuilder(); + } + podValue_ = + input.readMessage(flyteidl.core.Tasks.K8sPod.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((flyteidl.core.Tasks.K8sPod) podValue_); + podValue_ = subBuilder.buildPartial(); + } + podValueCase_ = 1; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + podTemplateName_ = s; + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.plugins.Common.internal_static_flyteidl_plugins_RoleSpec_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.plugins.Common.internal_static_flyteidl_plugins_RoleSpec_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.plugins.Common.RoleSpec.class, flyteidl.plugins.Common.RoleSpec.Builder.class); + } + + private int podValueCase_ = 0; + private java.lang.Object podValue_; + public enum PodValueCase + implements com.google.protobuf.Internal.EnumLite { + POD(1), + PODVALUE_NOT_SET(0); + private final int value; + private PodValueCase(int value) { + this.value = value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated + public static PodValueCase valueOf(int value) { + return forNumber(value); + } + + public static PodValueCase forNumber(int value) { + switch (value) { + case 1: return POD; + case 0: return PODVALUE_NOT_SET; + default: return null; + } + } + public int getNumber() { + return this.value; + } + }; + + public PodValueCase + getPodValueCase() { + return PodValueCase.forNumber( + podValueCase_); + } + + public static final int POD_FIELD_NUMBER = 1; + /** + * .flyteidl.core.K8sPod pod = 1; + */ + public boolean hasPod() { + return podValueCase_ == 1; + } + /** + * .flyteidl.core.K8sPod pod = 1; + */ + public flyteidl.core.Tasks.K8sPod getPod() { + if (podValueCase_ == 1) { + return (flyteidl.core.Tasks.K8sPod) podValue_; + } + return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } + /** + * .flyteidl.core.K8sPod pod = 1; + */ + public flyteidl.core.Tasks.K8sPodOrBuilder getPodOrBuilder() { + if (podValueCase_ == 1) { + return (flyteidl.core.Tasks.K8sPod) podValue_; + } + return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } + + public static final int POD_TEMPLATE_NAME_FIELD_NUMBER = 2; + private volatile java.lang.Object podTemplateName_; + /** + *
+     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating a
+     * Pod for this role. If this value is set, the specified PodTemplate will be used instead of, but applied
+     * identically as, the default PodTemplate configured in FlytePropeller.
+     * +optional
+     * 
+ * + * string pod_template_name = 2; + */ + public java.lang.String getPodTemplateName() { + java.lang.Object ref = podTemplateName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + podTemplateName_ = s; + return s; + } + } + /** + *
+     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating a
+     * Pod for this role. If this value is set, the specified PodTemplate will be used instead of, but applied
+     * identically as, the default PodTemplate configured in FlytePropeller.
+     * +optional
+     * 
+ * + * string pod_template_name = 2; + */ + public com.google.protobuf.ByteString + getPodTemplateNameBytes() { + java.lang.Object ref = podTemplateName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + podTemplateName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (podValueCase_ == 1) { + output.writeMessage(1, (flyteidl.core.Tasks.K8sPod) podValue_); + } + if (!getPodTemplateNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, podTemplateName_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (podValueCase_ == 1) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, (flyteidl.core.Tasks.K8sPod) podValue_); + } + if (!getPodTemplateNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, podTemplateName_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof flyteidl.plugins.Common.RoleSpec)) { + return super.equals(obj); + } + flyteidl.plugins.Common.RoleSpec other = (flyteidl.plugins.Common.RoleSpec) obj; + + if (!getPodTemplateName() + .equals(other.getPodTemplateName())) return false; + if (!getPodValueCase().equals(other.getPodValueCase())) return false; + switch (podValueCase_) { + case 1: + if (!getPod() + .equals(other.getPod())) return false; + break; + case 0: + default: + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + POD_TEMPLATE_NAME_FIELD_NUMBER; + hash = (53 * hash) + getPodTemplateName().hashCode(); + switch (podValueCase_) { + case 1: + hash = (37 * hash) + POD_FIELD_NUMBER; + hash = (53 * hash) + getPod().hashCode(); + break; + case 0: + default: + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static flyteidl.plugins.Common.RoleSpec parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.plugins.Common.RoleSpec parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.plugins.Common.RoleSpec parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.plugins.Common.RoleSpec parseFrom( + com.google.protobuf.ByteString data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.plugins.Common.RoleSpec parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.plugins.Common.RoleSpec parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.plugins.Common.RoleSpec parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.plugins.Common.RoleSpec parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.plugins.Common.RoleSpec parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static flyteidl.plugins.Common.RoleSpec parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.plugins.Common.RoleSpec parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.plugins.Common.RoleSpec parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(flyteidl.plugins.Common.RoleSpec prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * Used in place of the pod template references in core.TaskTemplate and core.TaskMetadata. This allows
+     * specifying pod configuration on a per role basis.
+     * 
+ * + * Protobuf type {@code flyteidl.plugins.RoleSpec} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:flyteidl.plugins.RoleSpec) + flyteidl.plugins.Common.RoleSpecOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.plugins.Common.internal_static_flyteidl_plugins_RoleSpec_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.plugins.Common.internal_static_flyteidl_plugins_RoleSpec_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.plugins.Common.RoleSpec.class, flyteidl.plugins.Common.RoleSpec.Builder.class); + } + + // Construct using flyteidl.plugins.Common.RoleSpec.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + podTemplateName_ = ""; + + podValueCase_ = 0; + podValue_ = null; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return flyteidl.plugins.Common.internal_static_flyteidl_plugins_RoleSpec_descriptor; + } + + @java.lang.Override + public flyteidl.plugins.Common.RoleSpec getDefaultInstanceForType() { + return flyteidl.plugins.Common.RoleSpec.getDefaultInstance(); + } + + @java.lang.Override + public flyteidl.plugins.Common.RoleSpec build() { + flyteidl.plugins.Common.RoleSpec result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public flyteidl.plugins.Common.RoleSpec buildPartial() { + flyteidl.plugins.Common.RoleSpec result = new flyteidl.plugins.Common.RoleSpec(this); + if (podValueCase_ == 1) { + if (podBuilder_ == null) { + result.podValue_ = podValue_; + } else { + result.podValue_ = podBuilder_.build(); + } + } + result.podTemplateName_ = podTemplateName_; + result.podValueCase_ = podValueCase_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof flyteidl.plugins.Common.RoleSpec) { + return 
mergeFrom((flyteidl.plugins.Common.RoleSpec)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(flyteidl.plugins.Common.RoleSpec other) { + if (other == flyteidl.plugins.Common.RoleSpec.getDefaultInstance()) return this; + if (!other.getPodTemplateName().isEmpty()) { + podTemplateName_ = other.podTemplateName_; + onChanged(); + } + switch (other.getPodValueCase()) { + case POD: { + mergePod(other.getPod()); + break; + } + case PODVALUE_NOT_SET: { + break; + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + flyteidl.plugins.Common.RoleSpec parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (flyteidl.plugins.Common.RoleSpec) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int podValueCase_ = 0; + private java.lang.Object podValue_; + public PodValueCase + getPodValueCase() { + return PodValueCase.forNumber( + podValueCase_); + } + + public Builder clearPodValue() { + podValueCase_ = 0; + podValue_ = null; + onChanged(); + return this; + } + + + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder> podBuilder_; + /** + * .flyteidl.core.K8sPod pod = 1; + */ + public boolean hasPod() { + return podValueCase_ == 1; + } + /** + * .flyteidl.core.K8sPod pod = 1; + */ + public flyteidl.core.Tasks.K8sPod getPod() { + if (podBuilder_ == null) { + if (podValueCase_ == 1) { + return (flyteidl.core.Tasks.K8sPod) podValue_; + } + return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } else { + if (podValueCase_ == 1) { + return podBuilder_.getMessage(); + } + return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } + } + /** + * .flyteidl.core.K8sPod pod = 1; + */ + public Builder setPod(flyteidl.core.Tasks.K8sPod value) { + if (podBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + podValue_ = value; + onChanged(); + } else { + podBuilder_.setMessage(value); + } + podValueCase_ = 1; + return this; + } + /** + * .flyteidl.core.K8sPod pod = 1; + */ + public Builder setPod( + flyteidl.core.Tasks.K8sPod.Builder builderForValue) { + if (podBuilder_ == null) { + podValue_ = builderForValue.build(); + onChanged(); + } else { + podBuilder_.setMessage(builderForValue.build()); + } + podValueCase_ = 1; + return this; + } + /** + * .flyteidl.core.K8sPod pod = 1; + */ + public Builder mergePod(flyteidl.core.Tasks.K8sPod value) { + if (podBuilder_ == null) { + if (podValueCase_ == 1 && + podValue_ != flyteidl.core.Tasks.K8sPod.getDefaultInstance()) { + podValue_ = flyteidl.core.Tasks.K8sPod.newBuilder((flyteidl.core.Tasks.K8sPod) podValue_) + .mergeFrom(value).buildPartial(); + } else { + podValue_ = value; + } + onChanged(); + } else { + if (podValueCase_ == 1) { + podBuilder_.mergeFrom(value); + } + podBuilder_.setMessage(value); + } + podValueCase_ = 1; + return this; + } + /** + * .flyteidl.core.K8sPod pod = 1; + */ + public Builder clearPod() { + if (podBuilder_ == null) { + 
if (podValueCase_ == 1) { + podValueCase_ = 0; + podValue_ = null; + onChanged(); + } + } else { + if (podValueCase_ == 1) { + podValueCase_ = 0; + podValue_ = null; + } + podBuilder_.clear(); + } + return this; + } + /** + * .flyteidl.core.K8sPod pod = 1; + */ + public flyteidl.core.Tasks.K8sPod.Builder getPodBuilder() { + return getPodFieldBuilder().getBuilder(); + } + /** + * .flyteidl.core.K8sPod pod = 1; + */ + public flyteidl.core.Tasks.K8sPodOrBuilder getPodOrBuilder() { + if ((podValueCase_ == 1) && (podBuilder_ != null)) { + return podBuilder_.getMessageOrBuilder(); + } else { + if (podValueCase_ == 1) { + return (flyteidl.core.Tasks.K8sPod) podValue_; + } + return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } + } + /** + * .flyteidl.core.K8sPod pod = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder> + getPodFieldBuilder() { + if (podBuilder_ == null) { + if (!(podValueCase_ == 1)) { + podValue_ = flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + } + podBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder>( + (flyteidl.core.Tasks.K8sPod) podValue_, + getParentForChildren(), + isClean()); + podValue_ = null; + } + podValueCase_ = 1; + onChanged();; + return podBuilder_; + } + + private java.lang.Object podTemplateName_ = ""; + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating a
+       * Pod for this role. If set, the specified PodTemplate is used in place of the default PodTemplate
+       * configured in FlytePropeller and is applied in the same way.
+       * +optional
+       * 
+ * + * string pod_template_name = 2; + */ + public java.lang.String getPodTemplateName() { + java.lang.Object ref = podTemplateName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + podTemplateName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating a
+       * Pod for this role. If set, the specified PodTemplate is used in place of the default PodTemplate
+       * configured in FlytePropeller and is applied in the same way.
+       * +optional
+       * 
+ * + * string pod_template_name = 2; + */ + public com.google.protobuf.ByteString + getPodTemplateNameBytes() { + java.lang.Object ref = podTemplateName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + podTemplateName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating a
+       * Pod for this role. If set, the specified PodTemplate is used in place of the default PodTemplate
+       * configured in FlytePropeller and is applied in the same way.
+       * +optional
+       * 
+ * + * string pod_template_name = 2; + */ + public Builder setPodTemplateName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + podTemplateName_ = value; + onChanged(); + return this; + } + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating a
+       * Pod for this role. If set, the specified PodTemplate is used in place of the default PodTemplate
+       * configured in FlytePropeller and is applied in the same way.
+       * +optional
+       * 
+ * + * string pod_template_name = 2; + */ + public Builder clearPodTemplateName() { + + podTemplateName_ = getDefaultInstance().getPodTemplateName(); + onChanged(); + return this; + } + /** + *
+       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating a
+       * Pod for this role. If set, the specified PodTemplate is used in place of the default PodTemplate
+       * configured in FlytePropeller and is applied in the same way.
+       * +optional
+       * 
+ * + * string pod_template_name = 2; + */ + public Builder setPodTemplateNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + podTemplateName_ = value; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:flyteidl.plugins.RoleSpec) + } + + // @@protoc_insertion_point(class_scope:flyteidl.plugins.RoleSpec) + private static final flyteidl.plugins.Common.RoleSpec DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new flyteidl.plugins.Common.RoleSpec(); + } + + public static flyteidl.plugins.Common.RoleSpec getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public RoleSpec parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RoleSpec(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public flyteidl.plugins.Common.RoleSpec getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_plugins_RoleSpec_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_plugins_RoleSpec_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\035flyteidl/plugins/common.proto\022\020flyteid" + + "l.plugins\032\031flyteidl/core/tasks.proto\"X\n\010" + + "RoleSpec\022$\n\003pod\030\001 \001(\0132\025.flyteidl.core.K8" + + "sPodH\000\022\031\n\021pod_template_name\030\002 \001(\tB\013\n\tpod" + + "_valueB?Z=github.com/flyteorg/flyte/flyt" + + "eidl/gen/pb-go/flyteidl/pluginsb\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + flyteidl.core.Tasks.getDescriptor(), + }, assigner); + internal_static_flyteidl_plugins_RoleSpec_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_flyteidl_plugins_RoleSpec_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_plugins_RoleSpec_descriptor, + new java.lang.String[] { "Pod", "PodTemplateName", "PodValue", }); + flyteidl.core.Tasks.getDescriptor(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/flyteidl/gen/pb-java/flyteidl/plugins/Spark.java b/flyteidl/gen/pb-java/flyteidl/plugins/Spark.java index e1afaeb7d9..0c97d3b979 100644 --- a/flyteidl/gen/pb-java/flyteidl/plugins/Spark.java +++ b/flyteidl/gen/pb-java/flyteidl/plugins/Spark.java @@ -729,82 +729,54 @@ java.lang.String getHadoopConfOrThrow( getDatabricksInstanceBytes(); /** - * .flyteidl.core.K8sPod driverPod = 10; - */ - boolean hasDriverPod(); - /** - * .flyteidl.core.K8sPod driverPod = 10; - */ - flyteidl.core.Tasks.K8sPod getDriverPod(); - /** - * .flyteidl.core.K8sPod driverPod = 10; + *
+     * The driver spec, used in place of the task's pod template
+     * 
+ * + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - flyteidl.core.Tasks.K8sPodOrBuilder getDriverPodOrBuilder(); - + boolean hasDriverSpec(); /** *
-     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-     * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-     * identically as, the default PodTemplate configured in FlytePropeller.
-     * +optional
+     * The driver spec, used in place of the task's pod template
      * 
* - * string driverPodTemplateName = 11; + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - java.lang.String getDriverPodTemplateName(); + flyteidl.plugins.Common.RoleSpec getDriverSpec(); /** *
-     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-     * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-     * identically as, the default PodTemplate configured in FlytePropeller.
-     * +optional
+     * The driver spec, used in place of the task's pod template
      * 
* - * string driverPodTemplateName = 11; + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - com.google.protobuf.ByteString - getDriverPodTemplateNameBytes(); + flyteidl.plugins.Common.RoleSpecOrBuilder getDriverSpecOrBuilder(); /** - * .flyteidl.core.K8sPod executorPod = 12; - */ - boolean hasExecutorPod(); - /** - * .flyteidl.core.K8sPod executorPod = 12; - */ - flyteidl.core.Tasks.K8sPod getExecutorPod(); - /** - * .flyteidl.core.K8sPod executorPod = 12; + *
+     * The executor spec, used in place of the task's pod template
+     * 
+ * + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - flyteidl.core.Tasks.K8sPodOrBuilder getExecutorPodOrBuilder(); - + boolean hasExecutorSpec(); /** *
-     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-     * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-     * identically as, the default PodTemplate configured in FlytePropeller.
-     * +optional
+     * The executor spec, used in place of the task's pod template
      * 
* - * string executorPodTemplateName = 13; + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - java.lang.String getExecutorPodTemplateName(); + flyteidl.plugins.Common.RoleSpec getExecutorSpec(); /** *
-     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-     * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-     * identically as, the default PodTemplate configured in FlytePropeller.
-     * +optional
+     * The executor spec, used in place of the task's pod template
      * 
* - * string executorPodTemplateName = 13; + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - com.google.protobuf.ByteString - getExecutorPodTemplateNameBytes(); - - public flyteidl.plugins.Spark.SparkJob.DriverPodValueCase getDriverPodValueCase(); - - public flyteidl.plugins.Spark.SparkJob.ExecutorPodValueCase getExecutorPodValueCase(); + flyteidl.plugins.Common.RoleSpecOrBuilder getExecutorSpecOrBuilder(); } /** *
@@ -829,8 +801,6 @@ private SparkJob() {
       executorPath_ = "";
       databricksToken_ = "";
       databricksInstance_ = "";
-      driverPodTemplateName_ = "";
-      executorPodTemplateName_ = "";
     }
 
     @java.lang.Override
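
[Editor's note, not part of the generated sources] For context on how the new fields fit together: the driverPod/driverPodTemplateName and executorPod/executorPodTemplateName pairs on SparkJob are collapsed into two RoleSpec messages (driverSpec = 10, executorSpec = 11), where a RoleSpec carries either an inline K8sPod (the pod_value oneof) or a pod_template_name. The sketch below shows a caller populating the new fields with the generated builders from this patch; the no-arg newBuilder()/build() calls are the standard protobuf-java builder API, and "spark-driver-template" is a made-up placeholder name.

    import flyteidl.core.Tasks.K8sPod;
    import flyteidl.plugins.Common.RoleSpec;
    import flyteidl.plugins.Spark.SparkJob;

    public final class SparkJobRoleSpecExample {
      public static SparkJob buildJob() {
        // Driver role: point at an existing PodTemplate k8s resource by name.
        RoleSpec driver = RoleSpec.newBuilder()
            .setPodTemplateName("spark-driver-template") // hypothetical template name
            .build();
        // Executor role: supply an inline K8sPod instead (the pod_value oneof);
        // a real caller would fill in the pod spec rather than use the default instance.
        RoleSpec executor = RoleSpec.newBuilder()
            .setPod(K8sPod.getDefaultInstance())
            .build();
        // Attach both roles to the SparkJob via the new message fields.
        return SparkJob.newBuilder()
            .setDriverSpec(driver)       // .flyteidl.plugins.RoleSpec driverSpec = 10
            .setExecutorSpec(executor)   // .flyteidl.plugins.RoleSpec executorSpec = 11
            .build();
      }
    }
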
@@ -933,43 +903,29 @@ private SparkJob(
               break;
             }
             case 82: {
-              flyteidl.core.Tasks.K8sPod.Builder subBuilder = null;
-              if (driverPodValueCase_ == 10) {
-                subBuilder = ((flyteidl.core.Tasks.K8sPod) driverPodValue_).toBuilder();
+              flyteidl.plugins.Common.RoleSpec.Builder subBuilder = null;
+              if (driverSpec_ != null) {
+                subBuilder = driverSpec_.toBuilder();
               }
-              driverPodValue_ =
-                  input.readMessage(flyteidl.core.Tasks.K8sPod.parser(), extensionRegistry);
+              driverSpec_ = input.readMessage(flyteidl.plugins.Common.RoleSpec.parser(), extensionRegistry);
               if (subBuilder != null) {
-                subBuilder.mergeFrom((flyteidl.core.Tasks.K8sPod) driverPodValue_);
-                driverPodValue_ = subBuilder.buildPartial();
+                subBuilder.mergeFrom(driverSpec_);
+                driverSpec_ = subBuilder.buildPartial();
               }
-              driverPodValueCase_ = 10;
-              break;
-            }
-            case 90: {
-              java.lang.String s = input.readStringRequireUtf8();
 
-              driverPodTemplateName_ = s;
               break;
             }
-            case 98: {
-              flyteidl.core.Tasks.K8sPod.Builder subBuilder = null;
-              if (executorPodValueCase_ == 12) {
-                subBuilder = ((flyteidl.core.Tasks.K8sPod) executorPodValue_).toBuilder();
+            case 90: {
+              flyteidl.plugins.Common.RoleSpec.Builder subBuilder = null;
+              if (executorSpec_ != null) {
+                subBuilder = executorSpec_.toBuilder();
               }
-              executorPodValue_ =
-                  input.readMessage(flyteidl.core.Tasks.K8sPod.parser(), extensionRegistry);
+              executorSpec_ = input.readMessage(flyteidl.plugins.Common.RoleSpec.parser(), extensionRegistry);
               if (subBuilder != null) {
-                subBuilder.mergeFrom((flyteidl.core.Tasks.K8sPod) executorPodValue_);
-                executorPodValue_ = subBuilder.buildPartial();
+                subBuilder.mergeFrom(executorSpec_);
+                executorSpec_ = subBuilder.buildPartial();
               }
-              executorPodValueCase_ = 12;
-              break;
-            }
-            case 106: {
-              java.lang.String s = input.readStringRequireUtf8();
 
-              executorPodTemplateName_ = s;
               break;
             }
             default: {
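
[Editor's note, not part of the generated sources] The hunks that follow drop the DriverPodValueCase/ExecutorPodValueCase oneof plumbing: because driverSpec and executorSpec are plain singular messages, presence is reported by hasDriverSpec()/hasExecutorSpec(), and the getters fall back to RoleSpec.getDefaultInstance() when the field is unset, as the accessor changes below show. A read-side sketch under those assumptions:

    import flyteidl.plugins.Common.RoleSpec;
    import flyteidl.plugins.Spark.SparkJob;

    public final class SparkJobRoleSpecReader {
      // Returns the driver's PodTemplate name, or "" when no driver spec was set.
      public static String driverTemplateName(SparkJob job) {
        if (!job.hasDriverSpec()) {
          // getDriverSpec() would return RoleSpec.getDefaultInstance() here.
          return "";
        }
        RoleSpec driver = job.getDriverSpec();
        // The template name now lives on RoleSpec (pod_template_name = 2),
        // replacing SparkJob.driverPodTemplateName from the previous schema.
        return driver.getPodTemplateName();
      }
    }
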
@@ -1019,78 +975,6 @@ protected com.google.protobuf.MapField internalGetMapField(
     }
 
     private int bitField0_;
-    private int driverPodValueCase_ = 0;
-    private java.lang.Object driverPodValue_;
-    public enum DriverPodValueCase
-        implements com.google.protobuf.Internal.EnumLite {
-      DRIVERPOD(10),
-      DRIVERPODVALUE_NOT_SET(0);
-      private final int value;
-      private DriverPodValueCase(int value) {
-        this.value = value;
-      }
-      /**
-       * @deprecated Use {@link #forNumber(int)} instead.
-       */
-      @java.lang.Deprecated
-      public static DriverPodValueCase valueOf(int value) {
-        return forNumber(value);
-      }
-
-      public static DriverPodValueCase forNumber(int value) {
-        switch (value) {
-          case 10: return DRIVERPOD;
-          case 0: return DRIVERPODVALUE_NOT_SET;
-          default: return null;
-        }
-      }
-      public int getNumber() {
-        return this.value;
-      }
-    };
-
-    public DriverPodValueCase
-    getDriverPodValueCase() {
-      return DriverPodValueCase.forNumber(
-          driverPodValueCase_);
-    }
-
-    private int executorPodValueCase_ = 0;
-    private java.lang.Object executorPodValue_;
-    public enum ExecutorPodValueCase
-        implements com.google.protobuf.Internal.EnumLite {
-      EXECUTORPOD(12),
-      EXECUTORPODVALUE_NOT_SET(0);
-      private final int value;
-      private ExecutorPodValueCase(int value) {
-        this.value = value;
-      }
-      /**
-       * @deprecated Use {@link #forNumber(int)} instead.
-       */
-      @java.lang.Deprecated
-      public static ExecutorPodValueCase valueOf(int value) {
-        return forNumber(value);
-      }
-
-      public static ExecutorPodValueCase forNumber(int value) {
-        switch (value) {
-          case 12: return EXECUTORPOD;
-          case 0: return EXECUTORPODVALUE_NOT_SET;
-          default: return null;
-        }
-      }
-      public int getNumber() {
-        return this.value;
-      }
-    };
-
-    public ExecutorPodValueCase
-    getExecutorPodValueCase() {
-      return ExecutorPodValueCase.forNumber(
-          executorPodValueCase_);
-    }
-
     public static final int APPLICATIONTYPE_FIELD_NUMBER = 1;
     private int applicationType_;
     /**
@@ -1494,152 +1378,70 @@ public java.lang.String getDatabricksInstance() {
       }
     }
 
-    public static final int DRIVERPOD_FIELD_NUMBER = 10;
+    public static final int DRIVERSPEC_FIELD_NUMBER = 10;
+    private flyteidl.plugins.Common.RoleSpec driverSpec_;
     /**
-     * .flyteidl.core.K8sPod driverPod = 10;
-     */
-    public boolean hasDriverPod() {
-      return driverPodValueCase_ == 10;
-    }
-    /**
-     * .flyteidl.core.K8sPod driverPod = 10;
-     */
-    public flyteidl.core.Tasks.K8sPod getDriverPod() {
-      if (driverPodValueCase_ == 10) {
-         return (flyteidl.core.Tasks.K8sPod) driverPodValue_;
-      }
-      return flyteidl.core.Tasks.K8sPod.getDefaultInstance();
-    }
-    /**
-     * .flyteidl.core.K8sPod driverPod = 10;
+     * 
+     * The driver spec, used in place of the task's pod template
+     * 
+ * + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - public flyteidl.core.Tasks.K8sPodOrBuilder getDriverPodOrBuilder() { - if (driverPodValueCase_ == 10) { - return (flyteidl.core.Tasks.K8sPod) driverPodValue_; - } - return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + public boolean hasDriverSpec() { + return driverSpec_ != null; } - - public static final int DRIVERPODTEMPLATENAME_FIELD_NUMBER = 11; - private volatile java.lang.Object driverPodTemplateName_; /** *
-     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-     * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-     * identically as, the default PodTemplate configured in FlytePropeller.
-     * +optional
+     * The driver spec, used in place of the task's pod template
      * 
* - * string driverPodTemplateName = 11; + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - public java.lang.String getDriverPodTemplateName() { - java.lang.Object ref = driverPodTemplateName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - driverPodTemplateName_ = s; - return s; - } + public flyteidl.plugins.Common.RoleSpec getDriverSpec() { + return driverSpec_ == null ? flyteidl.plugins.Common.RoleSpec.getDefaultInstance() : driverSpec_; } /** *
-     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-     * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-     * identically as, the default PodTemplate configured in FlytePropeller.
-     * +optional
+     * The driver spec, used in place of the task's pod template
      * 
* - * string driverPodTemplateName = 11; + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - public com.google.protobuf.ByteString - getDriverPodTemplateNameBytes() { - java.lang.Object ref = driverPodTemplateName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - driverPodTemplateName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } + public flyteidl.plugins.Common.RoleSpecOrBuilder getDriverSpecOrBuilder() { + return getDriverSpec(); } - public static final int EXECUTORPOD_FIELD_NUMBER = 12; - /** - * .flyteidl.core.K8sPod executorPod = 12; - */ - public boolean hasExecutorPod() { - return executorPodValueCase_ == 12; - } - /** - * .flyteidl.core.K8sPod executorPod = 12; - */ - public flyteidl.core.Tasks.K8sPod getExecutorPod() { - if (executorPodValueCase_ == 12) { - return (flyteidl.core.Tasks.K8sPod) executorPodValue_; - } - return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); - } + public static final int EXECUTORSPEC_FIELD_NUMBER = 11; + private flyteidl.plugins.Common.RoleSpec executorSpec_; /** - * .flyteidl.core.K8sPod executorPod = 12; + *
+     * The executor spec, used in place of the task's pod template
+     * 
+ * + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - public flyteidl.core.Tasks.K8sPodOrBuilder getExecutorPodOrBuilder() { - if (executorPodValueCase_ == 12) { - return (flyteidl.core.Tasks.K8sPod) executorPodValue_; - } - return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + public boolean hasExecutorSpec() { + return executorSpec_ != null; } - - public static final int EXECUTORPODTEMPLATENAME_FIELD_NUMBER = 13; - private volatile java.lang.Object executorPodTemplateName_; /** *
-     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-     * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-     * identically as, the default PodTemplate configured in FlytePropeller.
-     * +optional
+     * The executor spec, used in place of the task's pod template
      * 
* - * string executorPodTemplateName = 13; + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - public java.lang.String getExecutorPodTemplateName() { - java.lang.Object ref = executorPodTemplateName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - executorPodTemplateName_ = s; - return s; - } + public flyteidl.plugins.Common.RoleSpec getExecutorSpec() { + return executorSpec_ == null ? flyteidl.plugins.Common.RoleSpec.getDefaultInstance() : executorSpec_; } /** *
-     * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-     * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-     * identically as, the default PodTemplate configured in FlytePropeller.
-     * +optional
+     * The executor spec, used in place of the task's pod template
      * 
* - * string executorPodTemplateName = 13; + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - public com.google.protobuf.ByteString - getExecutorPodTemplateNameBytes() { - java.lang.Object ref = executorPodTemplateName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - executorPodTemplateName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } + public flyteidl.plugins.Common.RoleSpecOrBuilder getExecutorSpecOrBuilder() { + return getExecutorSpec(); } private byte memoizedIsInitialized = -1; @@ -1689,17 +1491,11 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getDatabricksInstanceBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 9, databricksInstance_); } - if (driverPodValueCase_ == 10) { - output.writeMessage(10, (flyteidl.core.Tasks.K8sPod) driverPodValue_); + if (driverSpec_ != null) { + output.writeMessage(10, getDriverSpec()); } - if (!getDriverPodTemplateNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 11, driverPodTemplateName_); - } - if (executorPodValueCase_ == 12) { - output.writeMessage(12, (flyteidl.core.Tasks.K8sPod) executorPodValue_); - } - if (!getExecutorPodTemplateNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 13, executorPodTemplateName_); + if (executorSpec_ != null) { + output.writeMessage(11, getExecutorSpec()); } unknownFields.writeTo(output); } @@ -1753,19 +1549,13 @@ public int getSerializedSize() { if (!getDatabricksInstanceBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, databricksInstance_); } - if (driverPodValueCase_ == 10) { + if (driverSpec_ != null) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(10, (flyteidl.core.Tasks.K8sPod) driverPodValue_); - } - if (!getDriverPodTemplateNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(11, driverPodTemplateName_); + .computeMessageSize(10, getDriverSpec()); } - if (executorPodValueCase_ == 12) { + if (executorSpec_ != null) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(12, (flyteidl.core.Tasks.K8sPod) executorPodValue_); - } - if (!getExecutorPodTemplateNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(13, executorPodTemplateName_); + .computeMessageSize(11, getExecutorSpec()); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -1802,27 +1592,15 @@ public boolean equals(final java.lang.Object obj) { .equals(other.getDatabricksToken())) return false; if (!getDatabricksInstance() .equals(other.getDatabricksInstance())) return false; - if (!getDriverPodTemplateName() - .equals(other.getDriverPodTemplateName())) return false; - if (!getExecutorPodTemplateName() - .equals(other.getExecutorPodTemplateName())) return false; - if (!getDriverPodValueCase().equals(other.getDriverPodValueCase())) return false; - switch (driverPodValueCase_) { - case 10: - if (!getDriverPod() - .equals(other.getDriverPod())) return false; - break; - case 0: - default: + if (hasDriverSpec() != other.hasDriverSpec()) return false; + if (hasDriverSpec()) { + if (!getDriverSpec() + .equals(other.getDriverSpec())) return false; } - if (!getExecutorPodValueCase().equals(other.getExecutorPodValueCase())) return false; - switch (executorPodValueCase_) { - case 12: - if (!getExecutorPod() - 
.equals(other.getExecutorPod())) return false; - break; - case 0: - default: + if (hasExecutorSpec() != other.hasExecutorSpec()) return false; + if (hasExecutorSpec()) { + if (!getExecutorSpec() + .equals(other.getExecutorSpec())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; @@ -1859,25 +1637,13 @@ public int hashCode() { hash = (53 * hash) + getDatabricksToken().hashCode(); hash = (37 * hash) + DATABRICKSINSTANCE_FIELD_NUMBER; hash = (53 * hash) + getDatabricksInstance().hashCode(); - hash = (37 * hash) + DRIVERPODTEMPLATENAME_FIELD_NUMBER; - hash = (53 * hash) + getDriverPodTemplateName().hashCode(); - hash = (37 * hash) + EXECUTORPODTEMPLATENAME_FIELD_NUMBER; - hash = (53 * hash) + getExecutorPodTemplateName().hashCode(); - switch (driverPodValueCase_) { - case 10: - hash = (37 * hash) + DRIVERPOD_FIELD_NUMBER; - hash = (53 * hash) + getDriverPod().hashCode(); - break; - case 0: - default: + if (hasDriverSpec()) { + hash = (37 * hash) + DRIVERSPEC_FIELD_NUMBER; + hash = (53 * hash) + getDriverSpec().hashCode(); } - switch (executorPodValueCase_) { - case 12: - hash = (37 * hash) + EXECUTORPOD_FIELD_NUMBER; - hash = (53 * hash) + getExecutorPod().hashCode(); - break; - case 0: - default: + if (hasExecutorSpec()) { + hash = (37 * hash) + EXECUTORSPEC_FIELD_NUMBER; + hash = (53 * hash) + getExecutorSpec().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; @@ -2062,14 +1828,18 @@ public Builder clear() { databricksInstance_ = ""; - driverPodTemplateName_ = ""; - - executorPodTemplateName_ = ""; - - driverPodValueCase_ = 0; - driverPodValue_ = null; - executorPodValueCase_ = 0; - executorPodValue_ = null; + if (driverSpecBuilder_ == null) { + driverSpec_ = null; + } else { + driverSpec_ = null; + driverSpecBuilder_ = null; + } + if (executorSpecBuilder_ == null) { + executorSpec_ = null; + } else { + executorSpec_ = null; + executorSpecBuilder_ = null; + } return this; } @@ -2113,25 +1883,17 @@ public flyteidl.plugins.Spark.SparkJob buildPartial() { } result.databricksToken_ = databricksToken_; result.databricksInstance_ = databricksInstance_; - if (driverPodValueCase_ == 10) { - if (driverPodBuilder_ == null) { - result.driverPodValue_ = driverPodValue_; - } else { - result.driverPodValue_ = driverPodBuilder_.build(); - } + if (driverSpecBuilder_ == null) { + result.driverSpec_ = driverSpec_; + } else { + result.driverSpec_ = driverSpecBuilder_.build(); } - result.driverPodTemplateName_ = driverPodTemplateName_; - if (executorPodValueCase_ == 12) { - if (executorPodBuilder_ == null) { - result.executorPodValue_ = executorPodValue_; - } else { - result.executorPodValue_ = executorPodBuilder_.build(); - } + if (executorSpecBuilder_ == null) { + result.executorSpec_ = executorSpec_; + } else { + result.executorSpec_ = executorSpecBuilder_.build(); } - result.executorPodTemplateName_ = executorPodTemplateName_; result.bitField0_ = to_bitField0_; - result.driverPodValueCase_ = driverPodValueCase_; - result.executorPodValueCase_ = executorPodValueCase_; onBuilt(); return result; } @@ -2210,31 +1972,11 @@ public Builder mergeFrom(flyteidl.plugins.Spark.SparkJob other) { databricksInstance_ = other.databricksInstance_; onChanged(); } - if (!other.getDriverPodTemplateName().isEmpty()) { - driverPodTemplateName_ = other.driverPodTemplateName_; - onChanged(); - } - if (!other.getExecutorPodTemplateName().isEmpty()) { - executorPodTemplateName_ = other.executorPodTemplateName_; - onChanged(); + if (other.hasDriverSpec()) { 
+ mergeDriverSpec(other.getDriverSpec()); } - switch (other.getDriverPodValueCase()) { - case DRIVERPOD: { - mergeDriverPod(other.getDriverPod()); - break; - } - case DRIVERPODVALUE_NOT_SET: { - break; - } - } - switch (other.getExecutorPodValueCase()) { - case EXECUTORPOD: { - mergeExecutorPod(other.getExecutorPod()); - break; - } - case EXECUTORPODVALUE_NOT_SET: { - break; - } + if (other.hasExecutorSpec()) { + mergeExecutorSpec(other.getExecutorSpec()); } this.mergeUnknownFields(other.unknownFields); onChanged(); @@ -2264,36 +2006,6 @@ public Builder mergeFrom( } return this; } - private int driverPodValueCase_ = 0; - private java.lang.Object driverPodValue_; - public DriverPodValueCase - getDriverPodValueCase() { - return DriverPodValueCase.forNumber( - driverPodValueCase_); - } - - public Builder clearDriverPodValue() { - driverPodValueCase_ = 0; - driverPodValue_ = null; - onChanged(); - return this; - } - - private int executorPodValueCase_ = 0; - private java.lang.Object executorPodValue_; - public ExecutorPodValueCase - getExecutorPodValueCase() { - return ExecutorPodValueCase.forNumber( - executorPodValueCase_); - } - - public Builder clearExecutorPodValue() { - executorPodValueCase_ = 0; - executorPodValue_ = null; - onChanged(); - return this; - } - private int bitField0_; private int applicationType_ = 0; @@ -3164,484 +2876,310 @@ public Builder setDatabricksInstanceBytes( return this; } + private flyteidl.plugins.Common.RoleSpec driverSpec_; private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder> driverPodBuilder_; + flyteidl.plugins.Common.RoleSpec, flyteidl.plugins.Common.RoleSpec.Builder, flyteidl.plugins.Common.RoleSpecOrBuilder> driverSpecBuilder_; /** - * .flyteidl.core.K8sPod driverPod = 10; + *
+       * The driver spec, used in place of the task's pod template
+       * 
+ * + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - public boolean hasDriverPod() { - return driverPodValueCase_ == 10; + public boolean hasDriverSpec() { + return driverSpecBuilder_ != null || driverSpec_ != null; } /** - * .flyteidl.core.K8sPod driverPod = 10; + *
+       * The driver spec, used in place of the task's pod template
+       * 
+ * + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - public flyteidl.core.Tasks.K8sPod getDriverPod() { - if (driverPodBuilder_ == null) { - if (driverPodValueCase_ == 10) { - return (flyteidl.core.Tasks.K8sPod) driverPodValue_; - } - return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + public flyteidl.plugins.Common.RoleSpec getDriverSpec() { + if (driverSpecBuilder_ == null) { + return driverSpec_ == null ? flyteidl.plugins.Common.RoleSpec.getDefaultInstance() : driverSpec_; } else { - if (driverPodValueCase_ == 10) { - return driverPodBuilder_.getMessage(); - } - return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + return driverSpecBuilder_.getMessage(); } } /** - * .flyteidl.core.K8sPod driverPod = 10; + *
+       * The driver spec, used in place of the task's pod template
+       * 
+ * + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - public Builder setDriverPod(flyteidl.core.Tasks.K8sPod value) { - if (driverPodBuilder_ == null) { + public Builder setDriverSpec(flyteidl.plugins.Common.RoleSpec value) { + if (driverSpecBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - driverPodValue_ = value; + driverSpec_ = value; onChanged(); } else { - driverPodBuilder_.setMessage(value); + driverSpecBuilder_.setMessage(value); } - driverPodValueCase_ = 10; + return this; } /** - * .flyteidl.core.K8sPod driverPod = 10; + *
+       * The driver spec, used in place of the task's pod template
+       * 
+ * + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - public Builder setDriverPod( - flyteidl.core.Tasks.K8sPod.Builder builderForValue) { - if (driverPodBuilder_ == null) { - driverPodValue_ = builderForValue.build(); + public Builder setDriverSpec( + flyteidl.plugins.Common.RoleSpec.Builder builderForValue) { + if (driverSpecBuilder_ == null) { + driverSpec_ = builderForValue.build(); onChanged(); } else { - driverPodBuilder_.setMessage(builderForValue.build()); + driverSpecBuilder_.setMessage(builderForValue.build()); } - driverPodValueCase_ = 10; + return this; } /** - * .flyteidl.core.K8sPod driverPod = 10; + *
+       * The driver spec, used in place of the task's pod template
+       * 
+ * + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - public Builder mergeDriverPod(flyteidl.core.Tasks.K8sPod value) { - if (driverPodBuilder_ == null) { - if (driverPodValueCase_ == 10 && - driverPodValue_ != flyteidl.core.Tasks.K8sPod.getDefaultInstance()) { - driverPodValue_ = flyteidl.core.Tasks.K8sPod.newBuilder((flyteidl.core.Tasks.K8sPod) driverPodValue_) - .mergeFrom(value).buildPartial(); + public Builder mergeDriverSpec(flyteidl.plugins.Common.RoleSpec value) { + if (driverSpecBuilder_ == null) { + if (driverSpec_ != null) { + driverSpec_ = + flyteidl.plugins.Common.RoleSpec.newBuilder(driverSpec_).mergeFrom(value).buildPartial(); } else { - driverPodValue_ = value; + driverSpec_ = value; } onChanged(); } else { - if (driverPodValueCase_ == 10) { - driverPodBuilder_.mergeFrom(value); - } - driverPodBuilder_.setMessage(value); + driverSpecBuilder_.mergeFrom(value); } - driverPodValueCase_ = 10; + return this; } /** - * .flyteidl.core.K8sPod driverPod = 10; + *
+       * The driver spec, used in place of the task's pod template
+       * 
+ * + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - public Builder clearDriverPod() { - if (driverPodBuilder_ == null) { - if (driverPodValueCase_ == 10) { - driverPodValueCase_ = 0; - driverPodValue_ = null; - onChanged(); - } + public Builder clearDriverSpec() { + if (driverSpecBuilder_ == null) { + driverSpec_ = null; + onChanged(); } else { - if (driverPodValueCase_ == 10) { - driverPodValueCase_ = 0; - driverPodValue_ = null; - } - driverPodBuilder_.clear(); + driverSpec_ = null; + driverSpecBuilder_ = null; } + return this; } - /** - * .flyteidl.core.K8sPod driverPod = 10; - */ - public flyteidl.core.Tasks.K8sPod.Builder getDriverPodBuilder() { - return getDriverPodFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.K8sPod driverPod = 10; - */ - public flyteidl.core.Tasks.K8sPodOrBuilder getDriverPodOrBuilder() { - if ((driverPodValueCase_ == 10) && (driverPodBuilder_ != null)) { - return driverPodBuilder_.getMessageOrBuilder(); - } else { - if (driverPodValueCase_ == 10) { - return (flyteidl.core.Tasks.K8sPod) driverPodValue_; - } - return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); - } - } - /** - * .flyteidl.core.K8sPod driverPod = 10; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder> - getDriverPodFieldBuilder() { - if (driverPodBuilder_ == null) { - if (!(driverPodValueCase_ == 10)) { - driverPodValue_ = flyteidl.core.Tasks.K8sPod.getDefaultInstance(); - } - driverPodBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder>( - (flyteidl.core.Tasks.K8sPod) driverPodValue_, - getParentForChildren(), - isClean()); - driverPodValue_ = null; - } - driverPodValueCase_ = 10; - onChanged();; - return driverPodBuilder_; - } - - private java.lang.Object driverPodTemplateName_ = ""; /** *
-       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-       * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-       * identically as, the default PodTemplate configured in FlytePropeller.
-       * +optional
+       * The driver spec, used in place of the task's pod template
        * 
* - * string driverPodTemplateName = 11; + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - public java.lang.String getDriverPodTemplateName() { - java.lang.Object ref = driverPodTemplateName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - driverPodTemplateName_ = s; - return s; - } else { - return (java.lang.String) ref; - } + public flyteidl.plugins.Common.RoleSpec.Builder getDriverSpecBuilder() { + + onChanged(); + return getDriverSpecFieldBuilder().getBuilder(); } /** *
-       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-       * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-       * identically as, the default PodTemplate configured in FlytePropeller.
-       * +optional
+       * The driver spec, used in place of the task's pod template
        * 
* - * string driverPodTemplateName = 11; + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - public com.google.protobuf.ByteString - getDriverPodTemplateNameBytes() { - java.lang.Object ref = driverPodTemplateName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - driverPodTemplateName_ = b; - return b; + public flyteidl.plugins.Common.RoleSpecOrBuilder getDriverSpecOrBuilder() { + if (driverSpecBuilder_ != null) { + return driverSpecBuilder_.getMessageOrBuilder(); } else { - return (com.google.protobuf.ByteString) ref; + return driverSpec_ == null ? + flyteidl.plugins.Common.RoleSpec.getDefaultInstance() : driverSpec_; } } /** *
-       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-       * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-       * identically as, the default PodTemplate configured in FlytePropeller.
-       * +optional
+       * The driver spec, used in place of the task's pod template
        * 
* - * string driverPodTemplateName = 11; + * .flyteidl.plugins.RoleSpec driverSpec = 10; */ - public Builder setDriverPodTemplateName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - driverPodTemplateName_ = value; - onChanged(); - return this; + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.plugins.Common.RoleSpec, flyteidl.plugins.Common.RoleSpec.Builder, flyteidl.plugins.Common.RoleSpecOrBuilder> + getDriverSpecFieldBuilder() { + if (driverSpecBuilder_ == null) { + driverSpecBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.plugins.Common.RoleSpec, flyteidl.plugins.Common.RoleSpec.Builder, flyteidl.plugins.Common.RoleSpecOrBuilder>( + getDriverSpec(), + getParentForChildren(), + isClean()); + driverSpec_ = null; + } + return driverSpecBuilder_; } + + private flyteidl.plugins.Common.RoleSpec executorSpec_; + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.plugins.Common.RoleSpec, flyteidl.plugins.Common.RoleSpec.Builder, flyteidl.plugins.Common.RoleSpecOrBuilder> executorSpecBuilder_; /** *
-       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-       * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-       * identically as, the default PodTemplate configured in FlytePropeller.
-       * +optional
+       * The executor spec, used in place of the task's pod template
        * 
* - * string driverPodTemplateName = 11; + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - public Builder clearDriverPodTemplateName() { - - driverPodTemplateName_ = getDefaultInstance().getDriverPodTemplateName(); - onChanged(); - return this; + public boolean hasExecutorSpec() { + return executorSpecBuilder_ != null || executorSpec_ != null; } /** *
-       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-       * driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-       * identically as, the default PodTemplate configured in FlytePropeller.
-       * +optional
+       * The executor spec, used in place of the task's pod template
        * 
* - * string driverPodTemplateName = 11; - */ - public Builder setDriverPodTemplateNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - driverPodTemplateName_ = value; - onChanged(); - return this; - } - - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder> executorPodBuilder_; - /** - * .flyteidl.core.K8sPod executorPod = 12; + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - public boolean hasExecutorPod() { - return executorPodValueCase_ == 12; - } - /** - * .flyteidl.core.K8sPod executorPod = 12; - */ - public flyteidl.core.Tasks.K8sPod getExecutorPod() { - if (executorPodBuilder_ == null) { - if (executorPodValueCase_ == 12) { - return (flyteidl.core.Tasks.K8sPod) executorPodValue_; - } - return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + public flyteidl.plugins.Common.RoleSpec getExecutorSpec() { + if (executorSpecBuilder_ == null) { + return executorSpec_ == null ? flyteidl.plugins.Common.RoleSpec.getDefaultInstance() : executorSpec_; } else { - if (executorPodValueCase_ == 12) { - return executorPodBuilder_.getMessage(); - } - return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); + return executorSpecBuilder_.getMessage(); } } /** - * .flyteidl.core.K8sPod executorPod = 12; + *
+       * The executor spec, used in place of the task's pod template
+       * 
+ * + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - public Builder setExecutorPod(flyteidl.core.Tasks.K8sPod value) { - if (executorPodBuilder_ == null) { + public Builder setExecutorSpec(flyteidl.plugins.Common.RoleSpec value) { + if (executorSpecBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - executorPodValue_ = value; + executorSpec_ = value; onChanged(); } else { - executorPodBuilder_.setMessage(value); + executorSpecBuilder_.setMessage(value); } - executorPodValueCase_ = 12; + return this; } /** - * .flyteidl.core.K8sPod executorPod = 12; + *
+       * The executor spec, used in place of the task's pod template
+       * 
+ * + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - public Builder setExecutorPod( - flyteidl.core.Tasks.K8sPod.Builder builderForValue) { - if (executorPodBuilder_ == null) { - executorPodValue_ = builderForValue.build(); + public Builder setExecutorSpec( + flyteidl.plugins.Common.RoleSpec.Builder builderForValue) { + if (executorSpecBuilder_ == null) { + executorSpec_ = builderForValue.build(); onChanged(); } else { - executorPodBuilder_.setMessage(builderForValue.build()); + executorSpecBuilder_.setMessage(builderForValue.build()); } - executorPodValueCase_ = 12; + return this; } /** - * .flyteidl.core.K8sPod executorPod = 12; + *
+       * The executor spec, used in place of the task's pod template
+       * 
+ * + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - public Builder mergeExecutorPod(flyteidl.core.Tasks.K8sPod value) { - if (executorPodBuilder_ == null) { - if (executorPodValueCase_ == 12 && - executorPodValue_ != flyteidl.core.Tasks.K8sPod.getDefaultInstance()) { - executorPodValue_ = flyteidl.core.Tasks.K8sPod.newBuilder((flyteidl.core.Tasks.K8sPod) executorPodValue_) - .mergeFrom(value).buildPartial(); + public Builder mergeExecutorSpec(flyteidl.plugins.Common.RoleSpec value) { + if (executorSpecBuilder_ == null) { + if (executorSpec_ != null) { + executorSpec_ = + flyteidl.plugins.Common.RoleSpec.newBuilder(executorSpec_).mergeFrom(value).buildPartial(); } else { - executorPodValue_ = value; + executorSpec_ = value; } onChanged(); } else { - if (executorPodValueCase_ == 12) { - executorPodBuilder_.mergeFrom(value); - } - executorPodBuilder_.setMessage(value); - } - executorPodValueCase_ = 12; - return this; - } - /** - * .flyteidl.core.K8sPod executorPod = 12; - */ - public Builder clearExecutorPod() { - if (executorPodBuilder_ == null) { - if (executorPodValueCase_ == 12) { - executorPodValueCase_ = 0; - executorPodValue_ = null; - onChanged(); - } - } else { - if (executorPodValueCase_ == 12) { - executorPodValueCase_ = 0; - executorPodValue_ = null; - } - executorPodBuilder_.clear(); - } - return this; - } - /** - * .flyteidl.core.K8sPod executorPod = 12; - */ - public flyteidl.core.Tasks.K8sPod.Builder getExecutorPodBuilder() { - return getExecutorPodFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.K8sPod executorPod = 12; - */ - public flyteidl.core.Tasks.K8sPodOrBuilder getExecutorPodOrBuilder() { - if ((executorPodValueCase_ == 12) && (executorPodBuilder_ != null)) { - return executorPodBuilder_.getMessageOrBuilder(); - } else { - if (executorPodValueCase_ == 12) { - return (flyteidl.core.Tasks.K8sPod) executorPodValue_; - } - return flyteidl.core.Tasks.K8sPod.getDefaultInstance(); - } - } - /** - * .flyteidl.core.K8sPod executorPod = 12; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder> - getExecutorPodFieldBuilder() { - if (executorPodBuilder_ == null) { - if (!(executorPodValueCase_ == 12)) { - executorPodValue_ = flyteidl.core.Tasks.K8sPod.getDefaultInstance(); - } - executorPodBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Tasks.K8sPod, flyteidl.core.Tasks.K8sPod.Builder, flyteidl.core.Tasks.K8sPodOrBuilder>( - (flyteidl.core.Tasks.K8sPod) executorPodValue_, - getParentForChildren(), - isClean()); - executorPodValue_ = null; + executorSpecBuilder_.mergeFrom(value); } - executorPodValueCase_ = 12; - onChanged();; - return executorPodBuilder_; - } - private java.lang.Object executorPodTemplateName_ = ""; - /** - *
-       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-       * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-       * identically as, the default PodTemplate configured in FlytePropeller.
-       * +optional
-       * 
- * - * string executorPodTemplateName = 13; - */ - public java.lang.String getExecutorPodTemplateName() { - java.lang.Object ref = executorPodTemplateName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - executorPodTemplateName_ = s; - return s; - } else { - return (java.lang.String) ref; - } + return this; } /** *
-       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-       * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-       * identically as, the default PodTemplate configured in FlytePropeller.
-       * +optional
+       * The executor spec, used in place of the task's pod template
        * 
* - * string executorPodTemplateName = 13; + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - public com.google.protobuf.ByteString - getExecutorPodTemplateNameBytes() { - java.lang.Object ref = executorPodTemplateName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - executorPodTemplateName_ = b; - return b; + public Builder clearExecutorSpec() { + if (executorSpecBuilder_ == null) { + executorSpec_ = null; + onChanged(); } else { - return (com.google.protobuf.ByteString) ref; + executorSpec_ = null; + executorSpecBuilder_ = null; } + + return this; } /** *
-       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-       * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-       * identically as, the default PodTemplate configured in FlytePropeller.
-       * +optional
+       * The executor spec, used in place of the task's pod template
        * 
* - * string executorPodTemplateName = 13; + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - public Builder setExecutorPodTemplateName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - executorPodTemplateName_ = value; + public flyteidl.plugins.Common.RoleSpec.Builder getExecutorSpecBuilder() { + onChanged(); - return this; + return getExecutorSpecFieldBuilder().getBuilder(); } /** *
-       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-       * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-       * identically as, the default PodTemplate configured in FlytePropeller.
-       * +optional
+       * The executor spec, used in place of the task's pod template
        * 
* - * string executorPodTemplateName = 13; + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - public Builder clearExecutorPodTemplateName() { - - executorPodTemplateName_ = getDefaultInstance().getExecutorPodTemplateName(); - onChanged(); - return this; + public flyteidl.plugins.Common.RoleSpecOrBuilder getExecutorSpecOrBuilder() { + if (executorSpecBuilder_ != null) { + return executorSpecBuilder_.getMessageOrBuilder(); + } else { + return executorSpec_ == null ? + flyteidl.plugins.Common.RoleSpec.getDefaultInstance() : executorSpec_; + } } /** *
-       * Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the 
-       * executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-       * identically as, the default PodTemplate configured in FlytePropeller.
-       * +optional
+       * The executor spec, used in place of the task's pod template
        * 
* - * string executorPodTemplateName = 13; + * .flyteidl.plugins.RoleSpec executorSpec = 11; */ - public Builder setExecutorPodTemplateNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - executorPodTemplateName_ = value; - onChanged(); - return this; + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.plugins.Common.RoleSpec, flyteidl.plugins.Common.RoleSpec.Builder, flyteidl.plugins.Common.RoleSpecOrBuilder> + getExecutorSpecFieldBuilder() { + if (executorSpecBuilder_ == null) { + executorSpecBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.plugins.Common.RoleSpec, flyteidl.plugins.Common.RoleSpec.Builder, flyteidl.plugins.Common.RoleSpecOrBuilder>( + getExecutorSpec(), + getParentForChildren(), + isClean()); + executorSpec_ = null; + } + return executorSpecBuilder_; } @java.lang.Override public final Builder setUnknownFields( @@ -3726,28 +3264,26 @@ public flyteidl.plugins.Spark.SparkJob getDefaultInstanceForType() { static { java.lang.String[] descriptorData = { "\n\034flyteidl/plugins/spark.proto\022\020flyteidl" + - ".plugins\032\031flyteidl/core/tasks.proto\032\034goo" + - "gle/protobuf/struct.proto\"B\n\020SparkApplic" + - "ation\".\n\004Type\022\n\n\006PYTHON\020\000\022\010\n\004JAVA\020\001\022\t\n\005S" + - "CALA\020\002\022\005\n\001R\020\003\"\233\005\n\010SparkJob\022@\n\017applicatio" + - "nType\030\001 \001(\0162\'.flyteidl.plugins.SparkAppl" + - "ication.Type\022\033\n\023mainApplicationFile\030\002 \001(" + - "\t\022\021\n\tmainClass\030\003 \001(\t\022<\n\tsparkConf\030\004 \003(\0132" + - ").flyteidl.plugins.SparkJob.SparkConfEnt" + - "ry\022>\n\nhadoopConf\030\005 \003(\0132*.flyteidl.plugin" + - "s.SparkJob.HadoopConfEntry\022\024\n\014executorPa" + - "th\030\006 \001(\t\022/\n\016databricksConf\030\007 \001(\0132\027.googl" + - "e.protobuf.Struct\022\027\n\017databricksToken\030\010 \001" + - "(\t\022\032\n\022databricksInstance\030\t \001(\t\022*\n\tdriver" + - "Pod\030\n \001(\0132\025.flyteidl.core.K8sPodH\000\022\035\n\025dr" + - "iverPodTemplateName\030\013 \001(\t\022,\n\013executorPod" + - "\030\014 \001(\0132\025.flyteidl.core.K8sPodH\001\022\037\n\027execu" + - "torPodTemplateName\030\r \001(\t\0320\n\016SparkConfEnt" + - "ry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\0321\n\017Ha" + - "doopConfEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(" + - "\t:\0028\001B\020\n\016driverPodValueB\022\n\020executorPodVa" + - "lueB?Z=github.com/flyteorg/flyte/flyteid" + - "l/gen/pb-go/flyteidl/pluginsb\006proto3" + ".plugins\032\035flyteidl/plugins/common.proto\032" + + "\034google/protobuf/struct.proto\"B\n\020SparkAp" + + "plication\".\n\004Type\022\n\n\006PYTHON\020\000\022\010\n\004JAVA\020\001\022" + + "\t\n\005SCALA\020\002\022\005\n\001R\020\003\"\275\004\n\010SparkJob\022@\n\017applic" + + "ationType\030\001 \001(\0162\'.flyteidl.plugins.Spark" + + "Application.Type\022\033\n\023mainApplicationFile\030" + + "\002 \001(\t\022\021\n\tmainClass\030\003 \001(\t\022<\n\tsparkConf\030\004 " + + "\003(\0132).flyteidl.plugins.SparkJob.SparkCon" + + "fEntry\022>\n\nhadoopConf\030\005 \003(\0132*.flyteidl.pl" + + "ugins.SparkJob.HadoopConfEntry\022\024\n\014execut" + + "orPath\030\006 \001(\t\022/\n\016databricksConf\030\007 \001(\0132\027.g" + + "oogle.protobuf.Struct\022\027\n\017databricksToken" + + "\030\010 \001(\t\022\032\n\022databricksInstance\030\t \001(\t\022.\n\ndr" + + 
"iverSpec\030\n \001(\0132\032.flyteidl.plugins.RoleSp" + + "ec\0220\n\014executorSpec\030\013 \001(\0132\032.flyteidl.plug" + + "ins.RoleSpec\0320\n\016SparkConfEntry\022\013\n\003key\030\001 " + + "\001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\0321\n\017HadoopConfEntr" + + "y\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B?Z=git" + + "hub.com/flyteorg/flyte/flyteidl/gen/pb-g" + + "o/flyteidl/pluginsb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { @@ -3760,7 +3296,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { - flyteidl.core.Tasks.getDescriptor(), + flyteidl.plugins.Common.getDescriptor(), com.google.protobuf.StructProto.getDescriptor(), }, assigner); internal_static_flyteidl_plugins_SparkApplication_descriptor = @@ -3774,7 +3310,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_plugins_SparkJob_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_plugins_SparkJob_descriptor, - new java.lang.String[] { "ApplicationType", "MainApplicationFile", "MainClass", "SparkConf", "HadoopConf", "ExecutorPath", "DatabricksConf", "DatabricksToken", "DatabricksInstance", "DriverPod", "DriverPodTemplateName", "ExecutorPod", "ExecutorPodTemplateName", "DriverPodValue", "ExecutorPodValue", }); + new java.lang.String[] { "ApplicationType", "MainApplicationFile", "MainClass", "SparkConf", "HadoopConf", "ExecutorPath", "DatabricksConf", "DatabricksToken", "DatabricksInstance", "DriverSpec", "ExecutorSpec", }); internal_static_flyteidl_plugins_SparkJob_SparkConfEntry_descriptor = internal_static_flyteidl_plugins_SparkJob_descriptor.getNestedTypes().get(0); internal_static_flyteidl_plugins_SparkJob_SparkConfEntry_fieldAccessorTable = new @@ -3787,7 +3323,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_plugins_SparkJob_HadoopConfEntry_descriptor, new java.lang.String[] { "Key", "Value", }); - flyteidl.core.Tasks.getDescriptor(); + flyteidl.plugins.Common.getDescriptor(); com.google.protobuf.StructProto.getDescriptor(); } diff --git a/flyteidl/gen/pb_python/flyteidl/plugins/common_pb2.py b/flyteidl/gen/pb_python/flyteidl/plugins/common_pb2.py new file mode 100644 index 0000000000..00e1af8b08 --- /dev/null +++ b/flyteidl/gen/pb_python/flyteidl/plugins/common_pb2.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: flyteidl/plugins/common.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from flyteidl.core import tasks_pb2 as flyteidl_dot_core_dot_tasks__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x66lyteidl/plugins/common.proto\x12\x10\x66lyteidl.plugins\x1a\x19\x66lyteidl/core/tasks.proto\"n\n\x08RoleSpec\x12)\n\x03pod\x18\x01 \x01(\x0b\x32\x15.flyteidl.core.K8sPodH\x00R\x03pod\x12*\n\x11pod_template_name\x18\x02 \x01(\tR\x0fpodTemplateNameB\x0b\n\tpod_valueB\xc3\x01\n\x14\x63om.flyteidl.pluginsB\x0b\x43ommonProtoP\x01Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins\xa2\x02\x03\x46PX\xaa\x02\x10\x46lyteidl.Plugins\xca\x02\x10\x46lyteidl\\Plugins\xe2\x02\x1c\x46lyteidl\\Plugins\\GPBMetadata\xea\x02\x11\x46lyteidl::Pluginsb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'flyteidl.plugins.common_pb2', _globals) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n\024com.flyteidl.pluginsB\013CommonProtoP\001Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins\242\002\003FPX\252\002\020Flyteidl.Plugins\312\002\020Flyteidl\\Plugins\342\002\034Flyteidl\\Plugins\\GPBMetadata\352\002\021Flyteidl::Plugins' + _globals['_ROLESPEC']._serialized_start=78 + _globals['_ROLESPEC']._serialized_end=188 +# @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/plugins/common_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/plugins/common_pb2.pyi new file mode 100644 index 0000000000..56c9ef0211 --- /dev/null +++ b/flyteidl/gen/pb_python/flyteidl/plugins/common_pb2.pyi @@ -0,0 +1,14 @@ +from flyteidl.core import tasks_pb2 as _tasks_pb2 +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from typing import ClassVar as _ClassVar, Mapping as _Mapping, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class RoleSpec(_message.Message): + __slots__ = ["pod", "pod_template_name"] + POD_FIELD_NUMBER: _ClassVar[int] + POD_TEMPLATE_NAME_FIELD_NUMBER: _ClassVar[int] + pod: _tasks_pb2.K8sPod + pod_template_name: str + def __init__(self, pod: _Optional[_Union[_tasks_pb2.K8sPod, _Mapping]] = ..., pod_template_name: _Optional[str] = ...) -> None: ... diff --git a/flyteidl/gen/pb_python/flyteidl/plugins/common_pb2_grpc.py b/flyteidl/gen/pb_python/flyteidl/plugins/common_pb2_grpc.py new file mode 100644 index 0000000000..2daafffebf --- /dev/null +++ b/flyteidl/gen/pb_python/flyteidl/plugins/common_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.py b/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.py index 933435f09c..cee49c9494 100644 --- a/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.py @@ -11,11 +11,11 @@ _sym_db = _symbol_database.Default() -from flyteidl.core import tasks_pb2 as flyteidl_dot_core_dot_tasks__pb2 +from flyteidl.plugins import common_pb2 as flyteidl_dot_plugins_dot_common__pb2 from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/plugins/spark.proto\x12\x10\x66lyteidl.plugins\x1a\x19\x66lyteidl/core/tasks.proto\x1a\x1cgoogle/protobuf/struct.proto\"B\n\x10SparkApplication\".\n\x04Type\x12\n\n\x06PYTHON\x10\x00\x12\x08\n\x04JAVA\x10\x01\x12\t\n\x05SCALA\x10\x02\x12\x05\n\x01R\x10\x03\"\x86\x07\n\x08SparkJob\x12Q\n\x0f\x61pplicationType\x18\x01 \x01(\x0e\x32\'.flyteidl.plugins.SparkApplication.TypeR\x0f\x61pplicationType\x12\x30\n\x13mainApplicationFile\x18\x02 \x01(\tR\x13mainApplicationFile\x12\x1c\n\tmainClass\x18\x03 \x01(\tR\tmainClass\x12G\n\tsparkConf\x18\x04 \x03(\x0b\x32).flyteidl.plugins.SparkJob.SparkConfEntryR\tsparkConf\x12J\n\nhadoopConf\x18\x05 \x03(\x0b\x32*.flyteidl.plugins.SparkJob.HadoopConfEntryR\nhadoopConf\x12\"\n\x0c\x65xecutorPath\x18\x06 \x01(\tR\x0c\x65xecutorPath\x12?\n\x0e\x64\x61tabricksConf\x18\x07 \x01(\x0b\x32\x17.google.protobuf.StructR\x0e\x64\x61tabricksConf\x12(\n\x0f\x64\x61tabricksToken\x18\x08 \x01(\tR\x0f\x64\x61tabricksToken\x12.\n\x12\x64\x61tabricksInstance\x18\t \x01(\tR\x12\x64\x61tabricksInstance\x12\x35\n\tdriverPod\x18\n \x01(\x0b\x32\x15.flyteidl.core.K8sPodH\x00R\tdriverPod\x12\x34\n\x15\x64riverPodTemplateName\x18\x0b \x01(\tR\x15\x64riverPodTemplateName\x12\x39\n\x0b\x65xecutorPod\x18\x0c \x01(\x0b\x32\x15.flyteidl.core.K8sPodH\x01R\x0b\x65xecutorPod\x12\x38\n\x17\x65xecutorPodTemplateName\x18\r \x01(\tR\x17\x65xecutorPodTemplateName\x1a<\n\x0eSparkConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a=\n\x0fHadoopConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x10\n\x0e\x64riverPodValueB\x12\n\x10\x65xecutorPodValueB\xc2\x01\n\x14\x63om.flyteidl.pluginsB\nSparkProtoP\x01Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins\xa2\x02\x03\x46PX\xaa\x02\x10\x46lyteidl.Plugins\xca\x02\x10\x46lyteidl\\Plugins\xe2\x02\x1c\x46lyteidl\\Plugins\\GPBMetadata\xea\x02\x11\x46lyteidl::Pluginsb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/plugins/spark.proto\x12\x10\x66lyteidl.plugins\x1a\x1d\x66lyteidl/plugins/common.proto\x1a\x1cgoogle/protobuf/struct.proto\"B\n\x10SparkApplication\".\n\x04Type\x12\n\n\x06PYTHON\x10\x00\x12\x08\n\x04JAVA\x10\x01\x12\t\n\x05SCALA\x10\x02\x12\x05\n\x01R\x10\x03\"\xfa\x05\n\x08SparkJob\x12Q\n\x0f\x61pplicationType\x18\x01 \x01(\x0e\x32\'.flyteidl.plugins.SparkApplication.TypeR\x0f\x61pplicationType\x12\x30\n\x13mainApplicationFile\x18\x02 \x01(\tR\x13mainApplicationFile\x12\x1c\n\tmainClass\x18\x03 \x01(\tR\tmainClass\x12G\n\tsparkConf\x18\x04 \x03(\x0b\x32).flyteidl.plugins.SparkJob.SparkConfEntryR\tsparkConf\x12J\n\nhadoopConf\x18\x05 \x03(\x0b\x32*.flyteidl.plugins.SparkJob.HadoopConfEntryR\nhadoopConf\x12\"\n\x0c\x65xecutorPath\x18\x06 
\x01(\tR\x0c\x65xecutorPath\x12?\n\x0e\x64\x61tabricksConf\x18\x07 \x01(\x0b\x32\x17.google.protobuf.StructR\x0e\x64\x61tabricksConf\x12(\n\x0f\x64\x61tabricksToken\x18\x08 \x01(\tR\x0f\x64\x61tabricksToken\x12.\n\x12\x64\x61tabricksInstance\x18\t \x01(\tR\x12\x64\x61tabricksInstance\x12:\n\ndriverSpec\x18\n \x01(\x0b\x32\x1a.flyteidl.plugins.RoleSpecR\ndriverSpec\x12>\n\x0c\x65xecutorSpec\x18\x0b \x01(\x0b\x32\x1a.flyteidl.plugins.RoleSpecR\x0c\x65xecutorSpec\x1a<\n\x0eSparkConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a=\n\x0fHadoopConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\xc2\x01\n\x14\x63om.flyteidl.pluginsB\nSparkProtoP\x01Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins\xa2\x02\x03\x46PX\xaa\x02\x10\x46lyteidl.Plugins\xca\x02\x10\x46lyteidl\\Plugins\xe2\x02\x1c\x46lyteidl\\Plugins\\GPBMetadata\xea\x02\x11\x46lyteidl::Pluginsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -28,14 +28,14 @@ _SPARKJOB_SPARKCONFENTRY._serialized_options = b'8\001' _SPARKJOB_HADOOPCONFENTRY._options = None _SPARKJOB_HADOOPCONFENTRY._serialized_options = b'8\001' - _globals['_SPARKAPPLICATION']._serialized_start=107 - _globals['_SPARKAPPLICATION']._serialized_end=173 - _globals['_SPARKAPPLICATION_TYPE']._serialized_start=127 - _globals['_SPARKAPPLICATION_TYPE']._serialized_end=173 - _globals['_SPARKJOB']._serialized_start=176 - _globals['_SPARKJOB']._serialized_end=1078 - _globals['_SPARKJOB_SPARKCONFENTRY']._serialized_start=917 - _globals['_SPARKJOB_SPARKCONFENTRY']._serialized_end=977 - _globals['_SPARKJOB_HADOOPCONFENTRY']._serialized_start=979 - _globals['_SPARKJOB_HADOOPCONFENTRY']._serialized_end=1040 + _globals['_SPARKAPPLICATION']._serialized_start=111 + _globals['_SPARKAPPLICATION']._serialized_end=177 + _globals['_SPARKAPPLICATION_TYPE']._serialized_start=131 + _globals['_SPARKAPPLICATION_TYPE']._serialized_end=177 + _globals['_SPARKJOB']._serialized_start=180 + _globals['_SPARKJOB']._serialized_end=942 + _globals['_SPARKJOB_SPARKCONFENTRY']._serialized_start=819 + _globals['_SPARKJOB_SPARKCONFENTRY']._serialized_end=879 + _globals['_SPARKJOB_HADOOPCONFENTRY']._serialized_start=881 + _globals['_SPARKJOB_HADOOPCONFENTRY']._serialized_end=942 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.pyi index 95029a2613..ef7e61baec 100644 --- a/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/plugins/spark_pb2.pyi @@ -1,4 +1,4 @@ -from flyteidl.core import tasks_pb2 as _tasks_pb2 +from flyteidl.plugins import common_pb2 as _common_pb2 from google.protobuf import struct_pb2 as _struct_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper @@ -23,7 +23,7 @@ class SparkApplication(_message.Message): def __init__(self) -> None: ... 
 class SparkJob(_message.Message):
-    __slots__ = ["applicationType", "mainApplicationFile", "mainClass", "sparkConf", "hadoopConf", "executorPath", "databricksConf", "databricksToken", "databricksInstance", "driverPod", "driverPodTemplateName", "executorPod", "executorPodTemplateName"]
+    __slots__ = ["applicationType", "mainApplicationFile", "mainClass", "sparkConf", "hadoopConf", "executorPath", "databricksConf", "databricksToken", "databricksInstance", "driverSpec", "executorSpec"]
     class SparkConfEntry(_message.Message):
         __slots__ = ["key", "value"]
         KEY_FIELD_NUMBER: _ClassVar[int]
@@ -47,10 +47,8 @@ class SparkJob(_message.Message):
     DATABRICKSCONF_FIELD_NUMBER: _ClassVar[int]
     DATABRICKSTOKEN_FIELD_NUMBER: _ClassVar[int]
     DATABRICKSINSTANCE_FIELD_NUMBER: _ClassVar[int]
-    DRIVERPOD_FIELD_NUMBER: _ClassVar[int]
-    DRIVERPODTEMPLATENAME_FIELD_NUMBER: _ClassVar[int]
-    EXECUTORPOD_FIELD_NUMBER: _ClassVar[int]
-    EXECUTORPODTEMPLATENAME_FIELD_NUMBER: _ClassVar[int]
+    DRIVERSPEC_FIELD_NUMBER: _ClassVar[int]
+    EXECUTORSPEC_FIELD_NUMBER: _ClassVar[int]
     applicationType: SparkApplication.Type
     mainApplicationFile: str
     mainClass: str
@@ -60,8 +58,6 @@ class SparkJob(_message.Message):
     databricksConf: _struct_pb2.Struct
     databricksToken: str
     databricksInstance: str
-    driverPod: _tasks_pb2.K8sPod
-    driverPodTemplateName: str
-    executorPod: _tasks_pb2.K8sPod
-    executorPodTemplateName: str
-    def __init__(self, applicationType: _Optional[_Union[SparkApplication.Type, str]] = ..., mainApplicationFile: _Optional[str] = ..., mainClass: _Optional[str] = ..., sparkConf: _Optional[_Mapping[str, str]] = ..., hadoopConf: _Optional[_Mapping[str, str]] = ..., executorPath: _Optional[str] = ..., databricksConf: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ..., databricksToken: _Optional[str] = ..., databricksInstance: _Optional[str] = ..., driverPod: _Optional[_Union[_tasks_pb2.K8sPod, _Mapping]] = ..., driverPodTemplateName: _Optional[str] = ..., executorPod: _Optional[_Union[_tasks_pb2.K8sPod, _Mapping]] = ..., executorPodTemplateName: _Optional[str] = ...) -> None: ...
+    driverSpec: _common_pb2.RoleSpec
+    executorSpec: _common_pb2.RoleSpec
+    def __init__(self, applicationType: _Optional[_Union[SparkApplication.Type, str]] = ..., mainApplicationFile: _Optional[str] = ..., mainClass: _Optional[str] = ..., sparkConf: _Optional[_Mapping[str, str]] = ..., hadoopConf: _Optional[_Mapping[str, str]] = ..., executorPath: _Optional[str] = ..., databricksConf: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ..., databricksToken: _Optional[str] = ..., databricksInstance: _Optional[str] = ..., driverSpec: _Optional[_Union[_common_pb2.RoleSpec, _Mapping]] = ..., executorSpec: _Optional[_Union[_common_pb2.RoleSpec, _Mapping]] = ...) -> None: ...
diff --git a/flyteidl/gen/pb_rust/flyteidl.plugins.rs b/flyteidl/gen/pb_rust/flyteidl.plugins.rs
index edf7e1eab1..a7adf62e66 100644
--- a/flyteidl/gen/pb_rust/flyteidl.plugins.rs
+++ b/flyteidl/gen/pb_rust/flyteidl.plugins.rs
@@ -33,6 +33,33 @@ pub mod array_job {
         MinSuccessRatio(f32),
     }
 }
+/// Used in place of the pod template references in core.TaskTemplate and core.TaskMetadata. This allows
+/// specifying pod configuration on a per role basis.
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct RoleSpec {
+    /// Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating a
+    /// Pod for this role. If this value is set, the specified PodTemplate will be used instead of, but applied
+    /// identically as, the default PodTemplate configured in FlytePropeller.
+    /// +optional
+    #[prost(string, tag="2")]
+    pub pod_template_name: ::prost::alloc::string::String,
+    /// The pod spec and metadata to be used as the base configuration when creating a Pod for this role.
+    /// +optional
+    #[prost(oneof="role_spec::PodValue", tags="1")]
+    pub pod_value: ::core::option::Option<role_spec::PodValue>,
+}
+/// Nested message and enum types in `RoleSpec`.
+pub mod role_spec {
+    /// The pod spec and metadata to be used as the base configuration when creating a Pod for this role.
+    /// +optional
+    #[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Oneof)]
+    pub enum PodValue {
+        #[prost(message, tag="1")]
+        Pod(super::super::core::K8sPod),
+    }
+}
 /// Custom Proto for Dask Plugin.
 #[allow(clippy::derive_partial_eq_without_eq)]
 #[derive(Clone, PartialEq, ::prost::Message)]
@@ -285,45 +312,12 @@ pub struct SparkJob {
     /// This instance name can be set in either flytepropeller or flytekit.
     #[prost(string, tag="9")]
     pub databricks_instance: ::prost::alloc::string::String,
-    /// Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the
-    /// driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-    /// identically as, the default PodTemplate configured in FlytePropeller.
-    /// +optional
-    #[prost(string, tag="11")]
-    pub driver_pod_template_name: ::prost::alloc::string::String,
-    /// Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the
-    /// executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-    /// identically as, the default PodTemplate configured in FlytePropeller.
-    /// +optional
-    #[prost(string, tag="13")]
-    pub executor_pod_template_name: ::prost::alloc::string::String,
-    /// The pod spec and metadata to be used as the base configuration when creating the driver Pod for this task.
-    /// +optional
-    #[prost(oneof="spark_job::DriverPodValue", tags="10")]
-    pub driver_pod_value: ::core::option::Option<spark_job::DriverPodValue>,
-    /// The pod spec and metadata to be used as the base configuration when creating the executor Pods for this task.
-    /// +optional
-    #[prost(oneof="spark_job::ExecutorPodValue", tags="12")]
-    pub executor_pod_value: ::core::option::Option<spark_job::ExecutorPodValue>,
-}
-/// Nested message and enum types in `SparkJob`.
-pub mod spark_job {
-    /// The pod spec and metadata to be used as the base configuration when creating the driver Pod for this task.
-    /// +optional
-    #[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Oneof)]
-    pub enum DriverPodValue {
-        #[prost(message, tag="10")]
-        DriverPod(super::super::core::K8sPod),
-    }
-    /// The pod spec and metadata to be used as the base configuration when creating the executor Pods for this task.
-    /// +optional
-    #[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Oneof)]
-    pub enum ExecutorPodValue {
-        #[prost(message, tag="12")]
-        ExecutorPod(super::super::core::K8sPod),
-    }
+    /// The driver spec, used in place of the task's pod template
+    #[prost(message, optional, tag="10")]
+    pub driver_spec: ::core::option::Option<RoleSpec>,
+    /// The executor spec, used in place of the task's pod template
+    #[prost(message, optional, tag="11")]
+    pub executor_spec: ::core::option::Option<RoleSpec>,
 }
 /// Custom proto for plugin that enables distributed training using
 #[allow(clippy::derive_partial_eq_without_eq)]
diff --git a/flyteidl/protos/flyteidl/plugins/common.proto b/flyteidl/protos/flyteidl/plugins/common.proto
new file mode 100644
index 0000000000..fb10efc127
--- /dev/null
+++ b/flyteidl/protos/flyteidl/plugins/common.proto
@@ -0,0 +1,22 @@
+syntax = "proto3";
+
+import "flyteidl/core/tasks.proto";
+
+package flyteidl.plugins;
+
+option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/plugins";
+
+// Used in place of the pod template references in core.TaskTemplate and core.TaskMetadata. This allows
+// specifying pod configuration on a per role basis.
+message RoleSpec {
+    // The pod spec and metadata to be used as the base configuration when creating a Pod for this role.
+    // +optional
+    oneof pod_value {
+        core.K8sPod pod = 1;
+    }
+    // Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating a
+    // Pod for this role. If this value is set, the specified PodTemplate will be used instead of, but applied
+    // identically as, the default PodTemplate configured in FlytePropeller.
+    // +optional
+    string pod_template_name = 2;
+}
\ No newline at end of file
diff --git a/flyteidl/protos/flyteidl/plugins/spark.proto b/flyteidl/protos/flyteidl/plugins/spark.proto
index a9c3a5f901..7abfea6d62 100644
--- a/flyteidl/protos/flyteidl/plugins/spark.proto
+++ b/flyteidl/protos/flyteidl/plugins/spark.proto
@@ -1,6 +1,6 @@
 syntax = "proto3";
 
-import "flyteidl/core/tasks.proto";
+import "flyteidl/plugins/common.proto";
 import "google/protobuf/struct.proto";
 
 package flyteidl.plugins;
@@ -33,24 +33,8 @@ message SparkJob {
     // Domain name of your deployment. Use the form <account>.cloud.databricks.com.
     // This instance name can be set in either flytepropeller or flytekit.
     string databricksInstance = 9;
-    // The pod spec and metadata to be used as the base configuration when creating the driver Pod for this task.
-    // +optional
-    oneof driverPodValue {
-        core.K8sPod driverPod = 10;
-    }
-    // Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the
-    // driver Pod for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-    // identically as, the default PodTemplate configured in FlytePropeller.
-    // +optional
-    string driverPodTemplateName = 11;
-    // The pod spec and metadata to be used as the base configuration when creating the executor Pods for this task.
-    // +optional
-    oneof executorPodValue {
-        core.K8sPod executorPod = 12;
-    }
-    // Reference to an existing PodTemplate k8s resource to be used as the base configuration when creating the
-    // executor Pods for this task. If this value is set, the specified PodTemplate will be used instead of, but applied
-    // identically as, the default PodTemplate configured in FlytePropeller.
-    // +optional
-    string executorPodTemplateName = 13;
+    // The driver spec, used in place of the task's pod template
+    RoleSpec driverSpec = 10;
+    // The executor spec, used in place of the task's pod template
+    RoleSpec executorSpec = 11;
 }
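
For reference, a minimal usage sketch of the new RoleSpec-based fields, assuming the regenerated Python bindings above (spark_pb2, common_pb2, tasks_pb2) are importable; the application file, Spark conf entry, and PodTemplate name below are placeholder values, not part of the patch.

# Minimal sketch: build a SparkJob using the new driverSpec/executorSpec fields.
from flyteidl.core import tasks_pb2
from flyteidl.plugins import common_pb2, spark_pb2

job = spark_pb2.SparkJob(
    applicationType=spark_pb2.SparkApplication.PYTHON,
    mainApplicationFile="local:///opt/app/main.py",   # placeholder path
    sparkConf={"spark.executor.instances": "2"},       # placeholder conf
    # Either reference an existing PodTemplate resource by name ...
    driverSpec=common_pb2.RoleSpec(pod_template_name="spark-driver-template"),
    # ... or embed a K8sPod spec inline; the pod field is carried in the
    # pod_value oneof, alongside pod_template_name.
    executorSpec=common_pb2.RoleSpec(pod=tasks_pb2.K8sPod()),
)
assert job.driverSpec.pod_template_name == "spark-driver-template"

Compared with the four per-role fields this patch removes from SparkJob, the shared RoleSpec keeps the pod/template choice in one message that other plugins can reuse.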