diff --git a/apis/bigquerydatatransfer/v1beta1/transferconfig_types.go b/apis/bigquerydatatransfer/v1beta1/transferconfig_types.go index 1b84e11bd7..a23250319f 100644 --- a/apis/bigquerydatatransfer/v1beta1/transferconfig_types.go +++ b/apis/bigquerydatatransfer/v1beta1/transferconfig_types.go @@ -32,12 +32,14 @@ type EncryptionConfiguration struct { type Status struct { // The status code, which should be an enum value of // [google.rpc.Code][google.rpc.Code]. + // +kcc:proto:field=google.rpc.Status.code Code *int32 `json:"code,omitempty"` // A developer-facing error message, which should be in English. Any // user-facing error message should be localized and sent in the // [google.rpc.Status.details][google.rpc.Status.details] field, or localized // by the client. + // +kcc:proto:field=google.rpc.Status.message Message *string `json:"message,omitempty"` /* NOTYET @@ -63,14 +65,16 @@ type BigQueryDataTransferConfigSpec struct { // for just [today-1]. // Only valid if the data source supports the feature. Set the value to 0 // to use the default value. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.data_refresh_window_days DataRefreshWindowDays *int32 `json:"dataRefreshWindowDays,omitempty"` // +kubebuilder:validation:XValidation:rule="self == oldSelf",message="DataSourceID field is immutable" // Immutable. + // +required // Data source ID. This cannot be changed once data transfer is created. The // full list of available data source IDs can be returned through an API call: // https://cloud.google.com/bigquery-transfer/docs/reference/datatransfer/rest/v1/projects.locations.dataSources/list - // +required + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.data_source_id DataSourceID *string `json:"dataSourceID,omitempty"` // The BigQuery target dataset id. @@ -79,13 +83,16 @@ type BigQueryDataTransferConfigSpec struct { // Is this config disabled. 
When set to true, no runs will be scheduled for // this transfer config. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.disabled Disabled *bool `json:"disabled,omitempty"` // User specified display name for the data transfer. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.display_name DisplayName *string `json:"displayName,omitempty"` // Email notifications will be sent according to these preferences // to the email address of the user who owns this transfer config. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.email_preferences EmailPreferences *EmailPreferences `json:"emailPreferences,omitempty"` // The encryption configuration part. Currently, it is only used for the @@ -93,17 +100,19 @@ type BigQueryDataTransferConfigSpec struct { // granted permissions to use the key. Read methods will return the key name // applied in effect. Write methods will apply the key if it is present, or // otherwise try to apply project default keys if it is absent. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.encryption_configuration EncryptionConfiguration *EncryptionConfiguration `json:"encryptionConfiguration,omitempty"` // Pub/Sub topic where notifications will be sent after transfer runs // associated with this transfer config finish. PubSubTopicRef *refv1beta1.PubSubTopicRef `json:"pubSubTopicRef,omitempty"` + // +required // Parameters specific to each data source. For more information see the // bq tab in the 'Setting up a data transfer' section for each data source. 
// For example the parameters for Cloud Storage transfers are listed here: // https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq - // +required + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.params Params map[string]string `json:"params,omitempty"` Parent `json:",inline"` @@ -125,9 +134,11 @@ type BigQueryDataTransferConfigSpec struct { // // NOTE: The minimum interval time between recurring transfers depends on the // data source; refer to the documentation for your data source. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.schedule Schedule *string `json:"schedule,omitempty"` // Options customizing the data transfer schedule. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.schedule_options ScheduleOptions *ScheduleOptions `json:"scheduleOptions,omitempty"` // Service account email. If this field is set, the transfer config will be created with this service account's credentials. @@ -136,10 +147,10 @@ type BigQueryDataTransferConfigSpec struct { // For the latest list of data sources, please refer to https://cloud.google.com/bigquery/docs/use-service-accounts. ServiceAccountRef *refv1beta1.IAMServiceAccountRef `json:"serviceAccountRef,omitempty"` - // V2 options customizing different types of data transfer schedule. - // This field supports existing time-based and manual transfer schedule. Also - // supports Event-Driven transfer schedule. ScheduleOptionsV2 cannot be used - // together with ScheduleOptions/Schedule. + // Options customizing different types of data transfer schedule. + // This field replaces "schedule" and "schedule_options" fields. + // ScheduleOptionsV2 cannot be used together with ScheduleOptions/Schedule. 
+ // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.schedule_options_v2 ScheduleOptionsV2 *ScheduleOptionsV2 `json:"scheduleOptionsV2,omitempty"` } @@ -173,6 +184,7 @@ type BigQueryDataTransferConfigStatus struct { // +kcc:proto=google.cloud.bigquery.datatransfer.v1.TransferConfig type BigQueryDataTransferConfigObservedState struct { // Output only. Region in which BigQuery dataset is located. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.dataset_region DatasetRegion *string `json:"datasetRegion,omitempty"` // Identifier. The resource name of the transfer config. @@ -182,27 +194,34 @@ type BigQueryDataTransferConfigObservedState struct { // where `config_id` is usually a UUID, even though it is not // guaranteed or required. The name is ignored when creating a transfer // config. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.name Name *string `json:"name,omitempty"` // Output only. Next time when data transfer will run. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.next_run_time NextRunTime *string `json:"nextRunTime,omitempty"` // Output only. Information about the user whose credentials are used to // transfer data. Populated only for `transferConfigs.get` requests. In case // the user information is not available, this field will not be populated. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.owner_info OwnerInfo *UserInfo `json:"ownerInfo,omitempty"` // Output only. State of the most recently updated transfer run. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.state State *string `json:"state,omitempty"` // Output only. Data transfer modification time. Ignored by server on input. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.update_time UpdateTime *string `json:"updateTime,omitempty"` // Deprecated. Unique ID of the user on whose behalf transfer is done. 
+ // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.user_id UserID *int64 `json:"userID,omitempty"` // Output only. Error code with detailed information about reason of the // latest config failure. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TransferConfig.error Error *Status `json:"error,omitempty"` } diff --git a/apis/bigquerydatatransfer/v1beta1/types.generated.go b/apis/bigquerydatatransfer/v1beta1/types.generated.go index b3054fcc25..d1f3cd0d9f 100644 --- a/apis/bigquerydatatransfer/v1beta1/types.generated.go +++ b/apis/bigquerydatatransfer/v1beta1/types.generated.go @@ -17,6 +17,7 @@ package v1beta1 // +kcc:proto=google.cloud.bigquery.datatransfer.v1.EmailPreferences type EmailPreferences struct { // If true, email notifications will be sent on transfer run failures. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.EmailPreferences.enable_failure_email EnableFailureEmail *bool `json:"enableFailureEmail,omitempty"` } @@ -30,6 +31,7 @@ type ScheduleOptions struct { // will be disabled. The runs can be started on ad-hoc basis using // StartManualTransferRuns API. When automatic scheduling is disabled, the // TransferConfig.schedule field will be ignored. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.ScheduleOptions.disable_auto_scheduling DisableAutoScheduling *bool `json:"disableAutoScheduling,omitempty"` // Specifies time to start scheduling transfer runs. The first run will be @@ -37,12 +39,14 @@ type ScheduleOptions struct { // defined in the schedule string. The start time can be changed at any // moment. The time when a data transfer can be triggered manually is not // limited by this option. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.ScheduleOptions.start_time StartTime *string `json:"startTime,omitempty"` // Defines time to stop scheduling transfer runs. A transfer run cannot be // scheduled at or after the end time. The end time can be changed at any // moment. 
The time when a data transfer can be triggered manually is not // limited by this option. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.ScheduleOptions.end_time EndTime *string `json:"endTime,omitempty"` } @@ -50,16 +54,19 @@ type ScheduleOptions struct { type ScheduleOptionsV2 struct { // Time based transfer schedule options. This is the default schedule // option. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.ScheduleOptionsV2.time_based_schedule TimeBasedSchedule *TimeBasedSchedule `json:"timeBasedSchedule,omitempty"` // Manual transfer schedule. If set, the transfer run will not be // auto-scheduled by the system, unless the client invokes // StartManualTransferRuns. This is equivalent to // disable_auto_scheduling = true. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.ScheduleOptionsV2.manual_schedule ManualSchedule *ManualSchedule `json:"manualSchedule,omitempty"` // Event driven transfer schedule options. If set, the transfer will be // scheduled upon events arrial. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.ScheduleOptionsV2.event_driven_schedule EventDrivenSchedule *EventDrivenSchedule `json:"eventDrivenSchedule,omitempty"` } @@ -78,23 +85,27 @@ type TimeBasedSchedule struct { // // NOTE: The minimum interval time between recurring transfers depends on the // data source; refer to the documentation for your data source. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TimeBasedSchedule.schedule Schedule *string `json:"schedule,omitempty"` // Specifies time to start scheduling transfer runs. The first run will be // scheduled at or after the start time according to a recurrence pattern // defined in the schedule string. The start time can be changed at any // moment. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TimeBasedSchedule.start_time StartTime *string `json:"startTime,omitempty"` // Defines time to stop scheduling transfer runs. 
A transfer run cannot be // scheduled at or after the end time. The end time can be changed at any // moment. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.TimeBasedSchedule.end_time EndTime *string `json:"endTime,omitempty"` } // +kcc:proto=google.cloud.bigquery.datatransfer.v1.UserInfo type UserInfo struct { // E-mail address of the user. + // +kcc:proto:field=google.cloud.bigquery.datatransfer.v1.UserInfo.email Email *string `json:"email,omitempty"` } @@ -127,8 +138,10 @@ type Any struct { // // Schemes other than `http`, `https` (or the empty scheme) might be // used with implementation specific semantics. + // +kcc:proto:field=google.protobuf.Any.type_url TypeURL *string `json:"typeURL,omitempty"` // Must be a valid serialized protocol buffer of the above specified type. + // +kcc:proto:field=google.protobuf.Any.value Value []byte `json:"value,omitempty"` } diff --git a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatatransferconfigs.bigquerydatatransfer.cnrm.cloud.google.com.yaml b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatatransferconfigs.bigquerydatatransfer.cnrm.cloud.google.com.yaml index fe62114671..6993d9ad56 100644 --- a/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatatransferconfigs.bigquerydatatransfer.cnrm.cloud.google.com.yaml +++ b/config/crds/resources/apiextensions.k8s.io_v1_customresourcedefinition_bigquerydatatransferconfigs.bigquerydatatransfer.cnrm.cloud.google.com.yaml @@ -664,10 +664,9 @@ spec: type: string type: object scheduleOptionsV2: - description: V2 options customizing different types of data transfer - schedule. This field supports existing time-based and manual transfer - schedule. Also supports Event-Driven transfer schedule. ScheduleOptionsV2 - cannot be used together with ScheduleOptions/Schedule. + description: Options customizing different types of data transfer + schedule. 
This field replaces "schedule" and "schedule_options" + fields. ScheduleOptionsV2 cannot be used together with ScheduleOptions/Schedule. properties: eventDrivenSchedule: description: Event driven transfer schedule options. If set, the diff --git a/dev/tools/controllerbuilder/pkg/codegen/common.go b/dev/tools/controllerbuilder/pkg/codegen/common.go new file mode 100644 index 0000000000..bf601c08b4 --- /dev/null +++ b/dev/tools/controllerbuilder/pkg/codegen/common.go @@ -0,0 +1,52 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package codegen + +import "strings" + +const ( + // KCCProtoMessageAnnotation is used for go structs that map to proto messages + KCCProtoMessageAnnotation = "+kcc:proto" + + // KCCProtoFieldAnnotation is used for go struct fields that map to proto fields + KCCProtoFieldAnnotation = "+kcc:proto:field" + + // KCCProtoIgnoreAnnotation is used for go struct fields that are ignored + KCCProtoIgnoreAnnotation = "+kcc:proto:ignore" +) + +// special-case proto messages that are currently not mapped to KRM Go structs +var protoMessagesNotMappedToGoStruct = map[string]string{ + "google.protobuf.Timestamp": "string", + "google.protobuf.Duration": "string", + "google.protobuf.Int64Value": "int64", + "google.protobuf.StringValue": "string", + "google.protobuf.Struct": "map[string]string", +} + +var Acronyms = []string{ + "ID", "HTML", "URL", "HTTP", "HTTPS", "SSH", + "IP", "GB", "FS", "PD", "KMS", "GCE", "VTPM", +} + +// IsAcronym returns true if the given string is an acronym +func IsAcronym(s string) bool { + for _, acronym := range Acronyms { + if strings.EqualFold(s, acronym) { + return true + } + } + return false +} diff --git a/dev/tools/controllerbuilder/pkg/codegen/typegenerator.go b/dev/tools/controllerbuilder/pkg/codegen/typegenerator.go index fe4e37298b..ed032f7b6f 100644 --- a/dev/tools/controllerbuilder/pkg/codegen/typegenerator.go +++ b/dev/tools/controllerbuilder/pkg/codegen/typegenerator.go @@ -30,23 +30,6 @@ import ( "k8s.io/klog/v2" ) -const ( - // KCCProtoMessageAnnotation is used for go structs that map to proto messages - KCCProtoMessageAnnotation = "+kcc:proto" - - // KCCProtoFieldAnnotation is used for go struct fields that map to proto fields - KCCProtoFieldAnnotation = "+kcc:proto:field" -) - -// Some special-case values that are not obvious how to map in KRM -var protoMessagesNotMappedToGoStruct = map[string]string{ - "google.protobuf.Timestamp": "string", - "google.protobuf.Duration": "string", - "google.protobuf.Int64Value": "int64", - 
"google.protobuf.StringValue": "string", - "google.protobuf.Struct": "map[string]string", -} - type TypeGenerator struct { generatorBase api *protoapi.Proto @@ -86,7 +69,7 @@ func (g *TypeGenerator) visitMessage(messageDescriptor protoreflect.MessageDescr g.visitedMessages = append(g.visitedMessages, messageDescriptor) - msgs, err := findDependenciesForMessage(messageDescriptor) + msgs, err := FindDependenciesForMessage(messageDescriptor) if err != nil { return err } @@ -131,7 +114,7 @@ func (g *TypeGenerator) WriteVisitedMessages() error { } out := g.getOutputFile(k) - goTypeName := goNameForProtoMessage(msg) + goTypeName := GoNameForProtoMessage(msg) skipGenerated := true goType, err := g.findTypeDeclaration(goTypeName, out.OutputDir(), skipGenerated) if err != nil { @@ -159,7 +142,7 @@ func (g *TypeGenerator) WriteVisitedMessages() error { } func WriteMessage(out io.Writer, msg protoreflect.MessageDescriptor) { - goType := goNameForProtoMessage(msg) + goType := GoNameForProtoMessage(msg) fmt.Fprintf(out, "\n") fmt.Fprintf(out, "// %s=%s\n", KCCProtoMessageAnnotation, msg.FullName()) @@ -171,51 +154,58 @@ func WriteMessage(out io.Writer, msg protoreflect.MessageDescriptor) { fmt.Fprintf(out, "}\n") } -func WriteField(out io.Writer, field protoreflect.FieldDescriptor, msg protoreflect.MessageDescriptor, fieldIndex int) { - sourceLocations := msg.ParentFile().SourceLocations().ByDescriptor(field) - - jsonName := getJSONForKRM(field) - goFieldName := goFieldName(field) - goType := "" - +func GoType(field protoreflect.FieldDescriptor) (string, error) { if field.IsMap() { entryMsg := field.Message() keyKind := entryMsg.Fields().ByName("key").Kind() valueKind := entryMsg.Fields().ByName("value").Kind() if keyKind == protoreflect.StringKind && valueKind == protoreflect.StringKind { - goType = "map[string]string" + return "map[string]string", nil } else if keyKind == protoreflect.StringKind && valueKind == protoreflect.Int64Kind { - goType = "map[string]int64" + return 
"map[string]int64", nil } else { - fmt.Fprintf(out, "\n\t// TODO: map type %v %v for %v\n\n", keyKind, valueKind, field.Name()) - return + return "", fmt.Errorf("unsupported map type with key %v and value %v", keyKind, valueKind) } + } + + var goType string + switch field.Kind() { + case protoreflect.MessageKind: + goType = GoNameForProtoMessage(field.Message()) + case protoreflect.EnumKind: + goType = "string" + default: + goType = goTypeForProtoKind(field.Kind()) + } + + if field.Cardinality() == protoreflect.Repeated { + goType = "[]" + goType } else { - switch field.Kind() { - case protoreflect.MessageKind: - goType = goNameForProtoMessage(field.Message()) + goType = "*" + goType + } - case protoreflect.EnumKind: - goType = "string" //string(field.Enum().Name()) + // Special case for proto "bytes" type + if goType == "*[]byte" { + goType = "[]byte" + } + // Special case for proto "google.protobuf.Struct" type + if goType == "*map[string]string" { + goType = "map[string]string" + } - default: - goType = goTypeForProtoKind(field.Kind()) - } + return goType, nil +} - if field.Cardinality() == protoreflect.Repeated { - goType = "[]" + goType - } else { - goType = "*" + goType - } +func WriteField(out io.Writer, field protoreflect.FieldDescriptor, msg protoreflect.MessageDescriptor, fieldIndex int) { + sourceLocations := msg.ParentFile().SourceLocations().ByDescriptor(field) - // Special case for proto "bytes" type - if goType == "*[]byte" { - goType = "[]byte" - } - // Special case for proto "google.protobuf.Struct" type - if goType == "*map[string]string" { - goType = "map[string]string" - } + jsonName := GetJSONForKRM(field) + GoFieldName := goFieldName(field) + + goType, err := GoType(field) + if err != nil { + fmt.Fprintf(out, "\n\t// TODO: %v\n\n", err) + return } // Blank line between fields for readability @@ -236,7 +226,7 @@ func WriteField(out io.Writer, field protoreflect.FieldDescriptor, msg protorefl fmt.Fprintf(out, "\t// %s=%s\n", 
KCCProtoFieldAnnotation, field.FullName()) fmt.Fprintf(out, "\t%s %s `json:\"%s,omitempty\"`\n", - goFieldName, + GoFieldName, goType, jsonName, ) @@ -261,7 +251,7 @@ func deduplicateAndSort(messages []protoreflect.MessageDescriptor) []protoreflec return messages } -func goNameForProtoMessage(msg protoreflect.MessageDescriptor) string { +func GoNameForProtoMessage(msg protoreflect.MessageDescriptor) string { fullName := string(msg.FullName()) // Some special-case values that are not obvious how to map in KRM @@ -315,16 +305,16 @@ func goTypeForProtoKind(kind protoreflect.Kind) string { return goType } -// getJSONForKRM returns the KRM JSON name for the field, +// GetJSONForKRM returns the KRM JSON name for the field, // honoring KRM conventions -func getJSONForKRM(protoField protoreflect.FieldDescriptor) string { +func GetJSONForKRM(protoField protoreflect.FieldDescriptor) string { tokens := strings.Split(string(protoField.Name()), "_") for i, token := range tokens { if i == 0 { // Do not capitalize first token continue } - if isAcronym(token) { + if IsAcronym(token) { token = strings.ToUpper(token) } else { token = strings.Title(token) @@ -339,7 +329,7 @@ func getJSONForKRM(protoField protoreflect.FieldDescriptor) string { func goFieldName(protoField protoreflect.FieldDescriptor) string { tokens := strings.Split(string(protoField.Name()), "_") for i, token := range tokens { - if isAcronym(token) { + if IsAcronym(token) { token = strings.ToUpper(token) } else { token = strings.Title(token) @@ -349,35 +339,8 @@ func goFieldName(protoField protoreflect.FieldDescriptor) string { return strings.Join(tokens, "") } -func isAcronym(s string) bool { - switch s { - case "id": - return true - case "html", "url": - return true - case "http", "https", "ssh": - return true - case "ip": - return true - case "gb": - return true - case "fs": - return true - case "pd": - return true - case "kms": - return true - case "gce": - return true - case "vtpm": - return true - default: - 
return false - } -} - -// findDependenciesForMessage recursively explores the dependent proto messages of the given message. -func findDependenciesForMessage(message protoreflect.MessageDescriptor) ([]protoreflect.MessageDescriptor, error) { +// FindDependenciesForMessage recursively explores the dependent proto messages of the given message. +func FindDependenciesForMessage(message protoreflect.MessageDescriptor) ([]protoreflect.MessageDescriptor, error) { msgs := make(map[string]protoreflect.MessageDescriptor) for i := 0; i < message.Fields().Len(); i++ { field := message.Fields().Get(i) diff --git a/dev/tools/controllerbuilder/pkg/commands/updatetypes/insertcommand.go b/dev/tools/controllerbuilder/pkg/commands/updatetypes/insertcommand.go new file mode 100644 index 0000000000..29d83b4e68 --- /dev/null +++ b/dev/tools/controllerbuilder/pkg/commands/updatetypes/insertcommand.go @@ -0,0 +1,90 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package updatetypes + +import ( + "context" + "fmt" + + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/typeupdater" + "github.com/spf13/cobra" +) + +type insertFieldOptions struct { + *baseUpdateTypeOptions + + parent string // Fully qualified name of the proto message holding the new field + field string // Name of the field to be inserted +} + +func buildInsertCommand(baseOptions *baseUpdateTypeOptions) *cobra.Command { + opt := &insertFieldOptions{ + baseUpdateTypeOptions: baseOptions, + } + + cmd := &cobra.Command{ + Use: "insert", + Short: "insert a new field and all of its dependent messages into KRM types", + PreRunE: validateInsertOptions(opt), + RunE: runInsert(opt), + } + + bindInsertFlags(cmd, opt) + + return cmd +} + +func bindInsertFlags(cmd *cobra.Command, opt *insertFieldOptions) { + opt.BindFlags(cmd) + cmd.Flags().StringVar(&opt.parent, "parent", opt.parent, "Fully qualified name of the proto message holding the new field. e.g. `google.cloud.bigquery.datatransfer.v1.TransferConfig`") + cmd.Flags().StringVar(&opt.field, "field", opt.field, "Name of the field to be inserted, e.g. 
`schedule_options_v2`") +} + +func validateInsertOptions(opt *insertFieldOptions) func(*cobra.Command, []string) error { + return func(cmd *cobra.Command, args []string) error { + if opt.apiDirectory == "" { + return fmt.Errorf("--api-dir is required") + } + if opt.parent == "" { + return fmt.Errorf("--parent is required") + } + if opt.field == "" { + return fmt.Errorf("--field is required") + } + return nil + } +} + +func runInsert(opt *insertFieldOptions) func(*cobra.Command, []string) error { + return func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + if err := runFieldInserter(ctx, opt); err != nil { + return err + } + return nil + } +} + +func runFieldInserter(_ context.Context, opt *insertFieldOptions) error { + fieldInserter := typeupdater.NewFieldInserter(&typeupdater.InsertFieldOptions{ + ProtoSourcePath: opt.GenerateOptions.ProtoSourcePath, + ParentMessageFullName: opt.parent, + FieldToInsert: opt.field, + IgnoredFields: opt.ignoredFields, + APIDirectory: opt.apiDirectory, + GoPackagePath: opt.apiGoPackagePath, + }) + return fieldInserter.Run() +} diff --git a/dev/tools/controllerbuilder/pkg/commands/updatetypes/synccommand.go b/dev/tools/controllerbuilder/pkg/commands/updatetypes/synccommand.go new file mode 100644 index 0000000000..59de5e5ecd --- /dev/null +++ b/dev/tools/controllerbuilder/pkg/commands/updatetypes/synccommand.go @@ -0,0 +1,99 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package updatetypes + +import ( + "context" + "fmt" + + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/typeupdater" + "github.com/spf13/cobra" +) + +type syncProtoPackageOptions struct { + *baseUpdateTypeOptions + + legacyMode bool +} + +func buildSyncCommand(baseOptions *baseUpdateTypeOptions) *cobra.Command { + opt := &syncProtoPackageOptions{ + baseUpdateTypeOptions: baseOptions, + } + + cmd := &cobra.Command{ + Use: "sync", + Short: "sync the KRM types with the proto package", + Long: `Sync the KRM types with the proto package. This command will update the KRM types +to match the proto package. If --message is specified, only the specified message and its +dependent messages will be synced. If --message is not specified, all messages in the proto +package indicated by --service will be synced.`, + PreRunE: validateSyncOptions(opt), + RunE: runSync(opt), + } + + bindSyncFlags(cmd, opt) + + return cmd +} + +func bindSyncFlags(cmd *cobra.Command, opt *syncProtoPackageOptions) { + opt.BindFlags(cmd) + cmd.Flags().BoolVar(&opt.legacyMode, "legacy-mode", false, "Set to true if the resource has KRM fields that are missing proto annotations.") +} + +func validateSyncOptions(opt *syncProtoPackageOptions) func(*cobra.Command, []string) error { + return func(cmd *cobra.Command, args []string) error { + if err := validateRequiredFlags(opt); err != nil { + return err + } + return nil + } +} + +func validateRequiredFlags(opt *syncProtoPackageOptions) error { + if opt.apiDirectory == "" { + return fmt.Errorf("--api-dir is required") + } + if opt.apiGoPackagePath == "" { + return fmt.Errorf("--api-go-package-path is required") + } + if opt.ServiceName == "" { + return fmt.Errorf("--service is required") + } + return nil +} + +func runSync(opt *syncProtoPackageOptions) func(*cobra.Command, []string) error { + return func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + if err := runPackageSyncer(ctx, opt); err != nil { 
+ return err + } + return nil + } +} + +func runPackageSyncer(ctx context.Context, opt *syncProtoPackageOptions) error { + syncer := typeupdater.NewProtoPackageSyncer(&typeupdater.SyncProtoPackageOptions{ + ServiceName: opt.ServiceName, + APIVersion: opt.APIVersion, + ProtoSourcePath: opt.GenerateOptions.ProtoSourcePath, + APIDirectory: opt.apiDirectory, + GoPackagePath: opt.apiGoPackagePath, + LegacyMode: opt.legacyMode, + }) + return syncer.Run() +} diff --git a/dev/tools/controllerbuilder/pkg/commands/updatetypes/updatetypescommand.go b/dev/tools/controllerbuilder/pkg/commands/updatetypes/updatetypescommand.go index 4e96d0e256..f396655b5f 100644 --- a/dev/tools/controllerbuilder/pkg/commands/updatetypes/updatetypescommand.go +++ b/dev/tools/controllerbuilder/pkg/commands/updatetypes/updatetypescommand.go @@ -15,27 +15,23 @@ package updatetypes import ( - "context" "fmt" "os" "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/options" - "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/typeupdater" "github.com/spf13/cobra" ) -type UpdateTypeOptions struct { +type baseUpdateTypeOptions struct { *options.GenerateOptions - parentMessage string // The fully qualified name of the parent proto message of the field to be inserted - insertField string ignoredFields string // TODO: could be part of GenerateOptions apiDirectory string apiGoPackagePath string } -func (o *UpdateTypeOptions) InitDefaults() error { +func (o *baseUpdateTypeOptions) InitDefaults() error { root, err := options.RepoRoot() if err != nil { return err @@ -45,9 +41,7 @@ func (o *UpdateTypeOptions) InitDefaults() error { return nil } -func (o *UpdateTypeOptions) BindFlags(cmd *cobra.Command) { - cmd.Flags().StringVar(&o.parentMessage, "parent", o.parentMessage, "Fully qualified name of the proto message holding the new field. e.g. 
`google.cloud.bigquery.datatransfer.v1.TransferConfig`") - cmd.Flags().StringVar(&o.insertField, "insert-field", o.insertField, "Name of the new field to be inserted, e.g. `schedule_options_v2`") +func (o *baseUpdateTypeOptions) BindFlags(cmd *cobra.Command) { // TODO: Update this flag to accept a file path pointing to the ignored fields YAML file. cmd.Flags().StringVar(&o.ignoredFields, "ignored-fields", o.ignoredFields, "Comma-separated list of fields to ignore") cmd.Flags().StringVar(&o.apiDirectory, "api-dir", o.apiDirectory, "Base directory for APIs") @@ -55,10 +49,9 @@ func (o *UpdateTypeOptions) BindFlags(cmd *cobra.Command) { } func BuildCommand(baseOptions *options.GenerateOptions) *cobra.Command { - opt := &UpdateTypeOptions{ + opt := &baseUpdateTypeOptions{ GenerateOptions: baseOptions, } - if err := opt.InitDefaults(); err != nil { fmt.Fprintf(os.Stderr, "Error initializing defaults: %v\n", err) os.Exit(1) @@ -67,36 +60,13 @@ func BuildCommand(baseOptions *options.GenerateOptions) *cobra.Command { cmd := &cobra.Command{ Use: "update-types", Short: "update KRM types for a proto service", - RunE: func(cmd *cobra.Command, args []string) error { - ctx := cmd.Context() - if err := runTypeUpdater(ctx, opt); err != nil { - return err - } - return nil - }, } opt.BindFlags(cmd) - return cmd -} - -func runTypeUpdater(ctx context.Context, opt *UpdateTypeOptions) error { - if opt.apiDirectory == "" { - return fmt.Errorf("--api-dir is required") - } + // subcommands + cmd.AddCommand(buildInsertCommand(opt)) + cmd.AddCommand(buildSyncCommand(opt)) - typeUpdaterOpts := &typeupdater.UpdaterOptions{ - ProtoSourcePath: opt.GenerateOptions.ProtoSourcePath, - ParentMessageFullName: opt.parentMessage, - FieldToInsert: opt.insertField, - IgnoredFields: opt.ignoredFields, - APIDirectory: opt.apiDirectory, - GoPackagePath: opt.apiGoPackagePath, - } - updater := typeupdater.NewTypeUpdater(typeUpdaterOpts) - if err := updater.Run(); err != nil { - return err - } - return nil + 
return cmd } diff --git a/dev/tools/controllerbuilder/pkg/typeupdater/common.go b/dev/tools/controllerbuilder/pkg/typeupdater/common.go new file mode 100644 index 0000000000..84e12aa075 --- /dev/null +++ b/dev/tools/controllerbuilder/pkg/typeupdater/common.go @@ -0,0 +1,86 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package typeupdater + +import ( + "go/ast" + "strings" + "unicode" + + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/codegen" +) + +// commentContains checks if the given comment group contains a target string annotation +func commentContains(cg *ast.CommentGroup, target string) bool { + if cg == nil { + return false + } + for _, c := range cg.List { + trimmed := strings.TrimPrefix(c.Text, "//") + trimmed = strings.TrimSpace(trimmed) + if trimmed == target { + return true + } + } + return false +} + +// getProtoFieldName converts a fully qualified proto field name to a snake_case field name +// e.g. 
"google.cloud.bigquery.datatransfer.v1.TransferConfig.DisplayName" -> "display_name" +func getProtoFieldName(fullName string) string { + parts := strings.Split(fullName, ".") + if len(parts) == 0 { + return "" + } + lastPart := parts[len(parts)-1] + + // convert from camelCase to snake_case + var result []rune + var i int + for i < len(lastPart) { + // check for acronym sequence + if unicode.IsUpper(rune(lastPart[i])) { + if acronym := extractAcronym(lastPart[i:]); len(acronym) > 0 { + if i > 0 { + result = append(result, '_') + } + result = append(result, []rune(strings.ToLower(acronym))...) + i += len(acronym) + continue + } + } + + // regular camelCase handling + r := rune(lastPart[i]) + if i > 0 && unicode.IsUpper(r) { + result = append(result, '_') + } + result = append(result, unicode.ToLower(r)) + i++ + } + + return string(result) +} + +// extractAcronym checks if the string starts with a known acronym and returns it +func extractAcronym(s string) string { + // try to find the longest acronym starting at this position + for j := len(s); j > 0; j-- { + if codegen.IsAcronym(s[:j]) { + return s[:j] + } + } + return "" +} diff --git a/dev/tools/controllerbuilder/pkg/typeupdater/typeupdater.go b/dev/tools/controllerbuilder/pkg/typeupdater/fieldinserter.go similarity index 95% rename from dev/tools/controllerbuilder/pkg/typeupdater/typeupdater.go rename to dev/tools/controllerbuilder/pkg/typeupdater/fieldinserter.go index 6f70b0ca93..457dcb77d1 100644 --- a/dev/tools/controllerbuilder/pkg/typeupdater/typeupdater.go +++ b/dev/tools/controllerbuilder/pkg/typeupdater/fieldinserter.go @@ -30,9 +30,7 @@ import ( "k8s.io/klog" ) -const kccProtoPrefix = "+kcc:proto=" - -type UpdaterOptions struct { +type InsertFieldOptions struct { ParentMessageFullName string FieldToInsert string IgnoredFields string @@ -41,8 +39,8 @@ type UpdaterOptions struct { GoPackagePath string } -type TypeUpdater struct { - opts *UpdaterOptions +type FieldInserter struct { + opts 
*InsertFieldOptions // newField is the internal representation of the field to be inserted newField newField // dependentMessages is a map nested messages under the new field to be inserted. @@ -62,13 +60,13 @@ type newMessage struct { generatedContent []byte // the content of the generated Go type corresponding to this message } -func NewTypeUpdater(opts *UpdaterOptions) *TypeUpdater { - return &TypeUpdater{ +func NewFieldInserter(opts *InsertFieldOptions) *FieldInserter { + return &FieldInserter{ opts: opts, } } -func (u *TypeUpdater) Run() error { +func (u *FieldInserter) Run() error { // 1. find new field and its dependent proto messages that needs to be generated if err := u.analyze(); err != nil { return err @@ -91,7 +89,7 @@ func (u *TypeUpdater) Run() error { } // anaylze finds the new field, its parent message, and all dependent messages that need to be generated. -func (u *TypeUpdater) analyze() error { +func (u *FieldInserter) analyze() error { // find the new proto field to be inserted newProtoField, parent, err := findNewField(u.opts.ProtoSourcePath, u.opts.ParentMessageFullName, u.opts.FieldToInsert) if err != nil { @@ -179,7 +177,7 @@ func removeAlreadyGenerated(goPackagePath, outputAPIDirectory string, targets ma return nil } -func (u *TypeUpdater) generate() error { +func (u *FieldInserter) generate() error { var buf bytes.Buffer klog.Infof("generate Go code for field %s", u.newField.proto.Name()) codegen.WriteField(&buf, u.newField.proto, u.newField.parent, 0) diff --git a/dev/tools/controllerbuilder/pkg/typeupdater/fieldupdateplan.go b/dev/tools/controllerbuilder/pkg/typeupdater/fieldupdateplan.go new file mode 100644 index 0000000000..5cf4a3a74a --- /dev/null +++ b/dev/tools/controllerbuilder/pkg/typeupdater/fieldupdateplan.go @@ -0,0 +1,210 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package typeupdater + +import ( + "bytes" + "fmt" + "go/ast" + "go/parser" + "go/token" + "os" + + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/codegen" + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/gocode" + "google.golang.org/protobuf/reflect/protoreflect" + "k8s.io/klog" +) + +// FieldUpdatePlan represents a planned update to a specific Go field +type FieldUpdatePlan struct { + filepath string // path to the file containing the field + + structName string // name of the Go struct containing the field + fieldName string // name of the Go field to update + fieldInfo *fieldInfo // original field info for reference + + protoParentName string // fully qualified name of the proto parent message + protoName string // fully qualified name of the proto field + protoField protoreflect.FieldDescriptor // proto field descriptor + + content []byte // generated field content +} + +func (s *ProtoPackageSyncer) createFieldUpdatePlan(msgInfo messageInfo, fieldInfo *fieldInfo, msgDesc protoreflect.MessageDescriptor) (*FieldUpdatePlan, error) { + if fieldInfo.IsVirtual { + if s.opts.LegacyMode { + // HACK: infer the proto name for legacy go fields without proto name annotation + fieldInfo.ProtoName = fmt.Sprintf("%s.%s", msgInfo.ProtoName, getProtoFieldName(fieldInfo.GoName)) + klog.Infof("Inferring proto name for legacy field %s in struct %s: %s", fieldInfo.GoName, msgInfo.GoName, fieldInfo.ProtoName) + } else { + klog.Infof("Skipping virtual field %s in %s", fieldInfo.GoName, 
msgInfo.GoName) + return nil, nil + } + } + + // 1. find the proto field + name := getProtoFieldName(fieldInfo.ProtoName) // e.g. "google.cloud.bigquery.datatransfer.v1.TransferConfig.DisplayName" -> "display_name" + protoField := msgDesc.Fields().ByName(protoreflect.Name(name)) + if protoField == nil { + klog.Warningf("proto field %s (full name: %s) not found in message %s", name, fieldInfo.ProtoName, msgInfo.ProtoName) + return nil, nil + } + + // 2. generate Go structs for the field + var buf bytes.Buffer + + // 2.1 special annotations such as "// +required" are manually added to the generated code, we need to preserve them + specialAnnotations := getSpecialAnnotations(fieldInfo.Comments) + if len(specialAnnotations) > 0 { + for _, annotation := range specialAnnotations { + fmt.Fprintf(&buf, "\t// %s\n", annotation) + } + } + + // 2.2 regenerate the field content based on the proto field descriptor + if fieldInfo.IsReference { // For reference fields, preserve original comments and reference type + return nil, nil // skip generating reference fields for now since we don't plan to update them + /* for _, comment := range fieldInfo.Comments { + fmt.Fprintf(&buf, "\t// %s\n", comment) + } + jsonName := codegen.GetJSONForKRM(protoField) + fmt.Fprintf(&buf, "\t%s *refv1beta1.%s `json:\"%s,omitempty\"`\n", + fieldInfo.GoName, + fieldInfo.RefType, + jsonName) */ + } else if fieldInfo.IsIgnored { // for ignored fields, generate only the field declaration without comments + goType, err := codegen.GoType(protoField) + if err != nil { + return nil, fmt.Errorf("determining Go type for ignored field %s (proto: %s): %w", fieldInfo.GoName, fieldInfo.ProtoName, err) + } + jsonName := codegen.GetJSONForKRM(protoField) + fmt.Fprintf(&buf, "\t%s %s `json:\"%s,omitempty\"`\n", + fieldInfo.GoName, + goType, + jsonName) + } else { // for regular fields, generate complete field with comments + codegen.WriteField(&buf, protoField, msgDesc, 0) // HACK: use fieldIndex=0 to avoid 
generating a leading blank line on comments + } + + // 3. create the update plan to record every information we need to update the field + plan := &FieldUpdatePlan{ + filepath: msgInfo.FilePath, + structName: msgInfo.GoName, + fieldName: fieldInfo.GoName, + fieldInfo: fieldInfo, + protoParentName: msgInfo.ProtoName, + protoName: fieldInfo.ProtoName, + protoField: protoField, + content: buf.Bytes(), + } + + return plan, nil +} + +func (s *ProtoPackageSyncer) applyFieldUpdatePlan(plan FieldUpdatePlan) error { + content, err := os.ReadFile(plan.filepath) + if err != nil { + return fmt.Errorf("reading file %s: %w", plan.filepath, err) + } + + fset := token.NewFileSet() + file, err := parser.ParseFile(fset, plan.filepath, content, parser.ParseComments) + if err != nil { + return fmt.Errorf("parsing file %s: %w", plan.filepath, err) + } + + docMap := gocode.NewDocMap(fset, file) + + // find the target struct and field by matching the proto name + targetMessageAnnotation := fmt.Sprintf("%s=%s", codegen.KCCProtoMessageAnnotation, plan.protoParentName) + targetFieldAnnotation := fmt.Sprintf("%s=%s", codegen.KCCProtoFieldAnnotation, plan.protoName) + var fieldNode *ast.Field + var found bool + ast.Inspect(file, func(n ast.Node) bool { + if found { + return false + } + + ts, ok := n.(*ast.TypeSpec) + if !ok { + return true + } + st, ok := ts.Type.(*ast.StructType) + if !ok { + return false + } + if !commentContains(docMap[ts], targetMessageAnnotation) { // match by fully qualified proto name annotation + return true + } + + // find the target field + for _, field := range st.Fields.List { + fieldComments := docMap[field] + if commentContains(fieldComments, targetFieldAnnotation) || + (s.opts.LegacyMode && len(field.Names) > 0 && field.Names[0].Name == plan.fieldName) { // HACK: match the field name for legacy Go fields without proper proto name annotation + fieldNode = field + found = true + return false + } + } + return true + }) + + if !found { + return fmt.Errorf("field %s 
not found in struct %s", plan.fieldName, plan.structName) + } + + // get the start position (accounting for doc comments if they exist) + var startPos token.Pos + var hasDoc bool + if doc := docMap[fieldNode]; doc != nil { + startPos = doc.Pos() + hasDoc = true + } else { + startPos = fieldNode.Pos() + } + start := fset.Position(startPos) + end := fset.Position(fieldNode.End()) + + if hasDoc { // HACK: remove the leading tab ("\t") from the original field content + start.Offset-- + } + + // replace the field content + newContent := make([]byte, 0, len(content)+len(plan.content)) + newContent = append(newContent, content[:start.Offset]...) + newContent = append(newContent, plan.content...) + newContent = append(newContent, content[end.Offset:]...) + + if err := os.WriteFile(plan.filepath, newContent, 0644); err != nil { + return fmt.Errorf("writing file %s: %w", plan.filepath, err) + } + + return nil +} + +func printUpdatePlans(plans []FieldUpdatePlan) { + klog.Infof("Field update plans:") + for _, plan := range plans { + klog.Infof("- File: %s", plan.filepath) + klog.Infof(" Struct: %s", plan.structName) + klog.Infof(" Field: %s", plan.fieldName) + klog.Infof(" Proto: %s", plan.protoName) + klog.Infof(" IsReference: %v", plan.fieldInfo.IsReference) + klog.Infof(" IsIgnored: %v", plan.fieldInfo.IsIgnored) + klog.Infof(" Content: %s", string(plan.content)) + } +} diff --git a/dev/tools/controllerbuilder/pkg/typeupdater/insertfield-ast.go b/dev/tools/controllerbuilder/pkg/typeupdater/insertfield-ast.go index 9e005cdf98..df2854c92f 100644 --- a/dev/tools/controllerbuilder/pkg/typeupdater/insertfield-ast.go +++ b/dev/tools/controllerbuilder/pkg/typeupdater/insertfield-ast.go @@ -36,7 +36,7 @@ type target struct { endPos int } -func (u *TypeUpdater) insertGoField() error { +func (u *FieldInserter) insertGoField() error { klog.Infof("inserting the generated Go code for field %s", u.newField.proto.Name()) targetComment := fmt.Sprintf("+kcc:proto=%s", 
u.newField.parent.FullName()) @@ -79,7 +79,7 @@ func (u *TypeUpdater) insertGoField() error { } comments := docMap[ts] - if !isTargetStruct(comments, targetComment) { + if !commentContains(comments, targetComment) { return true } @@ -127,17 +127,3 @@ func (u *TypeUpdater) insertGoField() error { return nil } - -func isTargetStruct(cg *ast.CommentGroup, target string) bool { - if cg == nil { - return false - } - for _, c := range cg.List { - trimmed := strings.TrimPrefix(c.Text, "//") - trimmed = strings.TrimSpace(trimmed) - if trimmed == target { - return true - } - } - return false -} diff --git a/dev/tools/controllerbuilder/pkg/typeupdater/insertfield-gemini.go b/dev/tools/controllerbuilder/pkg/typeupdater/insertfield-gemini.go index 722e4db582..868aa9d38a 100644 --- a/dev/tools/controllerbuilder/pkg/typeupdater/insertfield-gemini.go +++ b/dev/tools/controllerbuilder/pkg/typeupdater/insertfield-gemini.go @@ -27,7 +27,7 @@ import ( "k8s.io/klog/v2" ) -func (u *TypeUpdater) insertGoFieldGemini() error { +func (u *FieldInserter) insertGoFieldGemini() error { klog.Infof("inserting the generated Go code for field %s", u.newField.proto.Name()) ctx := context.Background() client, err := genai.NewClient(ctx, option.WithAPIKey(os.Getenv("GEMINI_API_KEY"))) diff --git a/dev/tools/controllerbuilder/pkg/typeupdater/insertmessage-ast.go b/dev/tools/controllerbuilder/pkg/typeupdater/insertmessage-ast.go index 477dc1f9ae..a85dc578c6 100644 --- a/dev/tools/controllerbuilder/pkg/typeupdater/insertmessage-ast.go +++ b/dev/tools/controllerbuilder/pkg/typeupdater/insertmessage-ast.go @@ -23,6 +23,7 @@ import ( "path/filepath" "strings" + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/codegen" "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/gocode" "k8s.io/klog/v2" @@ -34,7 +35,7 @@ type goStruct struct { end int // byte offset of the end of this struct } -func (u *TypeUpdater) insertGoMessages() error { +func 
(u *FieldInserter) insertGoMessages() error { if len(u.dependentMessages) == 0 { return nil } @@ -180,10 +181,10 @@ func protoNameFromComment(cg *ast.CommentGroup) (string, error) { for _, c := range cg.List { trimmed := strings.TrimPrefix(c.Text, "//") trimmed = strings.TrimSpace(trimmed) - if !strings.HasPrefix(trimmed, kccProtoPrefix) { + if !strings.HasPrefix(trimmed, codegen.KCCProtoMessageAnnotation+"=") { continue } - return strings.TrimSpace(strings.TrimPrefix(trimmed, kccProtoPrefix)), nil // found the comment with proto name + return strings.TrimSpace(strings.TrimPrefix(trimmed, codegen.KCCProtoMessageAnnotation+"=")), nil // found the comment with proto name } return "", fmt.Errorf("not found") } diff --git a/dev/tools/controllerbuilder/pkg/typeupdater/insertmessage-gemini.go b/dev/tools/controllerbuilder/pkg/typeupdater/insertmessage-gemini.go index 52e8e93d6b..009afa08fe 100644 --- a/dev/tools/controllerbuilder/pkg/typeupdater/insertmessage-gemini.go +++ b/dev/tools/controllerbuilder/pkg/typeupdater/insertmessage-gemini.go @@ -24,7 +24,7 @@ import ( "google.golang.org/api/option" ) -func (u *TypeUpdater) insertGoMessagesGemini() error { +func (u *FieldInserter) insertGoMessagesGemini() error { ctx := context.Background() client, err := genai.NewClient(ctx, option.WithAPIKey(os.Getenv("GEMINI_API_KEY"))) if err != nil { diff --git a/dev/tools/controllerbuilder/pkg/typeupdater/messageinfo.go b/dev/tools/controllerbuilder/pkg/typeupdater/messageinfo.go new file mode 100644 index 0000000000..509492e146 --- /dev/null +++ b/dev/tools/controllerbuilder/pkg/typeupdater/messageinfo.go @@ -0,0 +1,188 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package typeupdater + +import ( + "go/ast" + "go/parser" + "go/token" + "os" + "path/filepath" + "strings" + + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/codegen" + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/gocode" +) + +// messageInfo contains information about a Go struct parsed from existing types files. +// This struct is used to keep track of existing information about a message in the +// generated and human-edited code. +type messageInfo struct { + GoName string // The Go struct name + ProtoName string // The proto message name from +kcc:proto annotation + IsVirtual bool // KRM-specific messages that don't map to proto + Comments []string // Original comments + Fields map[string]*fieldInfo // Map of field name to field info + FilePath string // The file path where this Go struct was located +} + +// fieldInfo contains information about a field in a Go struct parsed from existing types files. +// This struct is used to keep track of existing information about a field in the +// generated and human-edited code. +type fieldInfo struct { + GoName string // Field name in Go + ProtoName string // The fully qualified proto field name from +kcc:proto:field annotation + IsVirtual bool // KRM-specific fields that don't map to proto + IsIgnored bool // Field explicitly marked as not implemented + IsReference bool // Is this a reference field? 
+ RefType string // What type of reference (ProjectRef, etc) + Comments []string // Preserve original comments for reference fields +} + +func extractMessageInfoFromGoFiles(dir string) (map[string]messageInfo, error) { + messages := make(map[string]messageInfo) + + err := filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error { + if err != nil || d.IsDir() || filepath.Ext(path) != ".go" { + return nil + } + + fset := token.NewFileSet() + file, err := parser.ParseFile(fset, path, nil, parser.ParseComments) + if err != nil { + return err + } + + docMap := gocode.NewDocMap(fset, file) + + ast.Inspect(file, func(n ast.Node) bool { + ts, ok := n.(*ast.TypeSpec) + if !ok { + return true + } + st, ok := ts.Type.(*ast.StructType) + if !ok { + return true + } + + msgInfo := newMessageInfo(ts.Name.Name, path) + msgInfo.parseComments(ts, docMap) + + // parse fields within the message + for _, field := range st.Fields.List { + if len(field.Names) == 0 { + continue + } + fieldInfo := newFieldInfo(field.Names[0].Name) + fieldInfo.parseComments(field, docMap) + msgInfo.Fields[fieldInfo.GoName] = fieldInfo + } + + messages[msgInfo.GoName] = msgInfo + return true + }) + return nil + }) + + return messages, err +} + +func newMessageInfo(name, filePath string) messageInfo { + return messageInfo{ + GoName: name, + FilePath: filePath, + Fields: make(map[string]*fieldInfo), + } +} + +func (info *messageInfo) parseComments(ts *ast.TypeSpec, docMap map[ast.Node]*ast.CommentGroup) { + info.IsVirtual = true + + if comments := docMap[ts]; comments != nil { + info.Comments = make([]string, 0, len(comments.List)) + for _, c := range comments.List { + text := strings.TrimSpace(strings.TrimPrefix(c.Text, "//")) + info.Comments = append(info.Comments, text) + + // check for proto annotation + if strings.HasPrefix(text, codegen.KCCProtoMessageAnnotation+"=") { + protoName := strings.TrimSpace(strings.TrimPrefix(text, codegen.KCCProtoMessageAnnotation+"=")) + info.ProtoName = 
protoName + info.IsVirtual = false + } + } + } +} + +func newFieldInfo(name string) *fieldInfo { + return &fieldInfo{ + GoName: name, + } +} + +func (info *fieldInfo) parseComments(field *ast.Field, docMap map[ast.Node]*ast.CommentGroup) { + info.IsVirtual = true + + // check if field is a reference field + if expr, ok := field.Type.(*ast.StarExpr); ok { + if sel, ok := expr.X.(*ast.SelectorExpr); ok { + if ident, ok := sel.X.(*ast.Ident); ok { + if ident.Name == "refv1beta1" { // HACK: this is a hack to identify reference fields + info.IsReference = true + info.RefType = sel.Sel.Name + } + } + } + } + + // parse comments to find kcc codegen annotations + if comments := docMap[field]; comments != nil { + info.Comments = make([]string, 0, len(comments.List)) + for _, c := range comments.List { + text := strings.TrimSpace(strings.TrimPrefix(c.Text, "//")) + info.Comments = append(info.Comments, text) + + if strings.HasPrefix(text, codegen.KCCProtoFieldAnnotation+"=") { + protoName := strings.TrimSpace(strings.TrimPrefix(text, codegen.KCCProtoFieldAnnotation+"=")) + info.ProtoName = protoName + info.IsVirtual = false + } + if strings.Contains(text, "NOTYET") || strings.Contains(text, codegen.KCCProtoIgnoreAnnotation) { + info.IsIgnored = true + } + } + } +} + +// getSpecialAnnotations extracts special annotations like +required from comment group +// These annotations are manually added to the generated code, we need to preserve them. 
+func getSpecialAnnotations(comments []string) []string { + if comments == nil { + return nil + } + + var annotations []string + for _, c := range comments { + if strings.Contains(c, "+genclient") || + strings.Contains(c, "+k8s") || + strings.Contains(c, "+kubebuilder") || + strings.Contains(c, "+required") || + strings.Contains(c, "+optional") || + strings.Contains(c, "Immutable") { + annotations = append(annotations, c) + } + } + return annotations +} diff --git a/dev/tools/controllerbuilder/pkg/typeupdater/protopackagesyncer.go b/dev/tools/controllerbuilder/pkg/typeupdater/protopackagesyncer.go new file mode 100644 index 0000000000..253ddd8a1e --- /dev/null +++ b/dev/tools/controllerbuilder/pkg/typeupdater/protopackagesyncer.go @@ -0,0 +1,158 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package typeupdater + +import ( + "fmt" + "path/filepath" + "strings" + + "github.com/GoogleCloudPlatform/k8s-config-connector/dev/tools/controllerbuilder/pkg/protoapi" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" + "k8s.io/apimachinery/pkg/runtime/schema" + "k8s.io/klog" +) + +type SyncProtoPackageOptions struct { + ServiceName string + APIVersion string + ProtoSourcePath string + APIDirectory string + GoPackagePath string + LegacyMode bool +} + +type ProtoPackageSyncer struct { + opts *SyncProtoPackageOptions + + // holds info about Go structs in existing types files, including both generated and manually edited structs. + // key is the go struct name + existingGoMessages map[string]messageInfo + api *protoapi.Proto // Store the loaded proto API +} + +func NewProtoPackageSyncer(opts *SyncProtoPackageOptions) *ProtoPackageSyncer { + return &ProtoPackageSyncer{ + opts: opts, + existingGoMessages: make(map[string]messageInfo), + } +} + +func (s *ProtoPackageSyncer) Run() error { + // 1. parse the existing go types + if err := s.parseExistingTypes(); err != nil { + return err + } + + // 2. load the proto package + if err := s.loadProtoPackage(); err != nil { + return err + } + + // 3. create the update plans + plans, err := s.createFieldUpdatePlans() + if err != nil { + return fmt.Errorf("creating update plans: %w", err) + } + + // printUpdatePlans(plans) + + // 4. 
apply the update plans to update the existing types + for _, plan := range plans { + if err := s.applyFieldUpdatePlan(plan); err != nil { + return fmt.Errorf("applying update plan for field %s in struct %s: %w", + plan.fieldName, plan.structName, err) + } + } + + return nil +} + +func (s *ProtoPackageSyncer) parseExistingTypes() error { + dir, err := typeFilePath(s.opts.APIDirectory, s.opts.APIVersion) + if err != nil { + return fmt.Errorf("getting API directory for %q: %w", s.opts.APIVersion, err) + } + + klog.Infof("Parsing existing types from %q", dir) + messages, err := extractMessageInfoFromGoFiles(dir) + if err != nil { + return err + } + + s.existingGoMessages = messages + return nil +} + +// typeFilePath returns the path to the types.go file for the given API version +func typeFilePath(apiBaseDir, gv string) (string, error) { + groupVersion, err := schema.ParseGroupVersion(gv) + if err != nil { + return "", fmt.Errorf("parsing APIVersion %q: %w", gv, err) + } + + goPackagePath := strings.TrimSuffix(groupVersion.Group, ".cnrm.cloud.google.com") + "/" + groupVersion.Version + packageTokens := strings.Split(goPackagePath, ".") + return filepath.Join(append([]string{apiBaseDir}, packageTokens...)...), nil +} + +func (s *ProtoPackageSyncer) createFieldUpdatePlans() ([]FieldUpdatePlan, error) { + var plans []FieldUpdatePlan + + // for each existing Go message that has a corresponding proto message + for goTypeName, msgInfo := range s.existingGoMessages { + if msgInfo.IsVirtual { + klog.Infof("Skipping virtual type %s", goTypeName) + continue + } + + // find corresponding proto message + desc, err := s.api.Files().FindDescriptorByName(protoreflect.FullName(msgInfo.ProtoName)) + if err != nil && err != protoregistry.NotFound { + return nil, fmt.Errorf("finding proto message %s: %w", msgInfo.ProtoName, err) + } + if desc == nil { + klog.Warningf("No proto message found for %s", msgInfo.ProtoName) + continue + } + msgDesc, ok := desc.(protoreflect.MessageDescriptor) 
+ if !ok { + return nil, fmt.Errorf("unexpected descriptor type for %s: %T", msgInfo.ProtoName, desc) + } + + // for each field in the message, create update plan based on existing go types and the matching proto field + for fieldName, fieldInfo := range msgInfo.Fields { + plan, err := s.createFieldUpdatePlan(msgInfo, fieldInfo, msgDesc) + if err != nil { + return nil, fmt.Errorf("creating plan for field %s: %w", fieldName, err) + } + if plan != nil { + plans = append(plans, *plan) + } + } + } + + return plans, nil +} + +func (s *ProtoPackageSyncer) loadProtoPackage() error { + api, err := protoapi.LoadProto(s.opts.ProtoSourcePath) + if err != nil { + return fmt.Errorf("loading proto: %w", err) + } + s.api = api + return nil +} diff --git a/dev/tools/controllerbuilder/update.sh b/dev/tools/controllerbuilder/update.sh index 7aac058e51..73ccfcb4fa 100755 --- a/dev/tools/controllerbuilder/update.sh +++ b/dev/tools/controllerbuilder/update.sh @@ -20,9 +20,18 @@ set -x REPO_ROOT="$(git rev-parse --show-toplevel)" cd ${REPO_ROOT}/dev/tools/controllerbuilder -# example usage -go run . update-types \ +# example usage of inserting a field +go run . update-types insert \ --parent "google.monitoring.dashboard.v1.Dashboard" \ - --insert-field "row_layout" \ + --field "row_layout" \ --api-dir ${REPO_ROOT}/apis/monitoring/v1beta1 \ --ignored-fields "google.monitoring.dashboard.v1.PickTimeSeriesFilter.interval" + +# example usage of syncing a message with all of its dependencies from proto package +go run . 
update-types sync \ + --service google.cloud.bigquery.datatransfer.v1 \ + --api-version bigquerydatatransfer.cnrm.cloud.google.com/v1beta1 \ + --legacy-mode # this is a flag to indicate that the resource is previously generated with KRM fields without proto annotations + +# Fix up formatting +${REPO_ROOT}/dev/tasks/fix-gofmt \ No newline at end of file diff --git a/docs/develop-resources/scenarios/new-field.md b/docs/develop-resources/scenarios/new-field.md index b32c7d665c..f10983f85e 100644 --- a/docs/develop-resources/scenarios/new-field.md +++ b/docs/develop-resources/scenarios/new-field.md @@ -10,19 +10,19 @@ Run the following command. This should add the new field and all of its dependen REPO_ROOT="$(git rev-parse --show-toplevel)" cd $REPO_ROOT/dev/tools/controllerbuilder -go run . update-types \ +go run . update-types insert \ --parent "google.monitoring.dashboard.v1.Dashboard" \ - --insert-field "row_layout" \ + --field "row_layout" \ --api-dir ${REPO_ROOT}/apis/monitoring/v1beta1 ``` -* `--parent-message` +* `--parent` Fully qualified name of the proto message holding the new field. -* `--field-to-insert` +* `--field` -Name of the new proto field to be inserted. +Name of the proto field to be inserted under the parent message. * `--api-dir` diff --git a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquerydatatransfer/bigquerydatatransferconfig.md b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquerydatatransfer/bigquerydatatransferconfig.md index 8cc7474b5b..635f2b7218 100644 --- a/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquerydatatransfer/bigquerydatatransferconfig.md +++ b/scripts/generate-google3-docs/resource-reference/generated/resource-docs/bigquerydatatransfer/bigquerydatatransferconfig.md @@ -459,7 +459,7 @@ serviceAccountRef:

object

-

{% verbatim %}V2 options customizing different types of data transfer schedule. This field supports existing time-based and manual transfer schedule. Also supports Event-Driven transfer schedule. ScheduleOptionsV2 cannot be used together with ScheduleOptions/Schedule.{% endverbatim %}

+

{% verbatim %}Options customizing different types of data transfer schedule. This field replaces "schedule" and "schedule_options" fields. ScheduleOptionsV2 cannot be used together with ScheduleOptions/Schedule.{% endverbatim %}