diff --git a/apis/bigquery/v1beta1/dataset_identity.go b/apis/bigquery/v1beta1/dataset_identity.go new file mode 100644 index 0000000000..d1cdf644ec --- /dev/null +++ b/apis/bigquery/v1beta1/dataset_identity.go @@ -0,0 +1,113 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package v1beta1 + +import ( + "context" + "fmt" + "strings" + + "github.com/GoogleCloudPlatform/k8s-config-connector/apis/common" + refsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" + "sigs.k8s.io/controller-runtime/pkg/client" +) + +// DatasetIdentity defines the resource reference to BigQueryDataset, which "External" field +// holds the GCP identifier for the KRM object. +type DatasetIdentity struct { + parent *DatasetParent + id string +} + +func (i *DatasetIdentity) String() string { + return i.parent.String() + "/datasets/" + i.id +} + +func (i *DatasetIdentity) ID() string { + return i.id +} + +func (i *DatasetIdentity) Parent() *DatasetParent { + return i.parent +} + +type DatasetParent struct { + ProjectID string +} + +func (p *DatasetParent) String() string { + return "projects/" + p.ProjectID +} + +// New builds a DatasetIdentity from the Config Connector Dataset object. 
+func NewDatasetIdentity(ctx context.Context, reader client.Reader, obj *BigQueryDataset) (*DatasetIdentity, error) { + + // Get Parent + projectRef, err := refsv1beta1.ResolveProject(ctx, reader, obj.GetNamespace(), obj.Spec.ProjectRef) + if err != nil { + return nil, err + } + projectID := projectRef.ProjectID + if projectID == "" { + return nil, fmt.Errorf("cannot resolve project") + } + + // Get desired ID + resourceID := common.ValueOf(obj.Spec.ResourceID) + if resourceID == "" { + resourceID = obj.GetName() + } + if resourceID == "" { + return nil, fmt.Errorf("cannot resolve resource ID") + } + + // Use approved External + externalRef := common.ValueOf(obj.Status.ExternalRef) + if externalRef != "" { + // Validate desired with actual + actualParent, actualResourceID, err := ParseDatasetExternal(externalRef) + if err != nil { + return nil, err + } + if actualParent.ProjectID != projectID { + return nil, fmt.Errorf("spec.projectRef changed, expect %s, got %s", actualParent.ProjectID, projectID) + } + if actualResourceID != resourceID { + return nil, fmt.Errorf("cannot reset `metadata.name` or `spec.resourceID` to %s, since it has already assigned to %s", + resourceID, actualResourceID) + } + } + return &DatasetIdentity{ + parent: &DatasetParent{ + ProjectID: projectID, + }, + id: resourceID, + }, nil +} + +func ParseDatasetExternal(external string) (parent *DatasetParent, resourceID string, err error) { + tokens := strings.Split(external, "/") + + if len(tokens) != 4 || tokens[0] != "projects" || tokens[2] != "datasets" { + return nil, "", fmt.Errorf("format of BigQueryDataset external=%q was not known (use projects/{{projectID}}/datasets/{{datasetID}})", external) + } + parent = &DatasetParent{ + ProjectID: tokens[1], + } + + resourceID = tokens[3] + + return parent, resourceID, nil +} diff --git a/apis/bigquery/v1beta1/dataset_reference.go b/apis/bigquery/v1beta1/dataset_reference.go index 7beae935f3..67a30afe65 100644 --- a/apis/bigquery/v1beta1/dataset_reference.go +++ 
b/apis/bigquery/v1beta1/dataset_reference.go @@ -17,7 +17,6 @@ package v1beta1 import ( "context" "fmt" - "strings" refsv1beta1 "github.com/GoogleCloudPlatform/k8s-config-connector/apis/refs/v1beta1" "github.com/GoogleCloudPlatform/k8s-config-connector/pkg/k8s" @@ -27,13 +26,13 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client" ) -var _ refsv1beta1.ExternalNormalizer = &BigQueryDatasetRef{} +var _ refsv1beta1.ExternalNormalizer = &DatasetRef{} -// BigQueryDatasetRef defines the resource reference to BigQueryDataset, which "External" field +// DatasetRef defines the resource reference to BigQueryDataset, which "External" field // holds the GCP identifier for the KRM object. -type BigQueryDatasetRef struct { +type DatasetRef struct { // A reference to an externally managed BigQueryDataset resource. - // Should be in the format "projects/{{projectID}}/locations/{{location}}/datasets/{{datasetID}}". + // Should be in the format "projects/{{projectID}}/datasets/{{datasetID}}". External string `json:"external,omitempty"` // The name of a BigQueryDataset resource. @@ -41,20 +40,18 @@ type BigQueryDatasetRef struct { // The namespace of a BigQueryDataset resource. Namespace string `json:"namespace,omitempty"` - - parent *BigQueryDatasetParent } // NormalizedExternal provision the "External" value for other resource that depends on BigQueryDataset. // If the "External" is given in the other resource's spec.BigQueryDatasetRef, the given value will be used. // Otherwise, the "Name" and "Namespace" will be used to query the actual BigQueryDataset object from the cluster. 
-func (r *BigQueryDatasetRef) NormalizedExternal(ctx context.Context, reader client.Reader, otherNamespace string) (string, error) { +func (r *DatasetRef) NormalizedExternal(ctx context.Context, reader client.Reader, otherNamespace string) (string, error) { if r.External != "" && r.Name != "" { return "", fmt.Errorf("cannot specify both name and external on %s reference", BigQueryDatasetGVK.Kind) } // From given External if r.External != "" { - if _, _, err := ParseBigQueryDatasetExternal(r.External); err != nil { + if _, _, err := ParseDatasetExternal(r.External); err != nil { return "", err } return r.External, nil @@ -84,99 +81,3 @@ func (r *BigQueryDatasetRef) NormalizedExternal(ctx context.Context, reader clie r.External = actualExternalRef return r.External, nil } - -// New builds a BigQueryDatasetRef from the Config Connector BigQueryDataset object. -func NewBigQueryDatasetRef(ctx context.Context, reader client.Reader, obj *BigQueryDataset) (*BigQueryDatasetRef, error) { - id := &BigQueryDatasetRef{} - - // Get Parent - projectRef, err := refsv1beta1.ResolveProject(ctx, reader, obj.GetNamespace(), obj.Spec.ProjectRef) - if err != nil { - return nil, err - } - projectID := projectRef.ProjectID - if projectID == "" { - return nil, fmt.Errorf("cannot resolve project") - } - id.parent = &BigQueryDatasetParent{ProjectID: projectID} - - // Get desired ID - resourceID := valueOf(obj.Spec.ResourceID) - if resourceID == "" { - resourceID = obj.GetName() - } - if resourceID == "" { - return nil, fmt.Errorf("cannot resolve resource ID") - } - - // Use approved External - externalRef := valueOf(obj.Status.ExternalRef) - if externalRef == "" { - id.External = asBigQueryDatasetExternal(id.parent, resourceID) - return id, nil - } - - // Validate desired with actual - actualParent, actualResourceID, err := ParseBigQueryDatasetExternal(externalRef) - if err != nil { - return nil, err - } - if actualParent.ProjectID != projectID { - return nil, fmt.Errorf("spec.projectRef 
changed, expect %s, got %s", actualParent.ProjectID, projectID) - } - if actualResourceID != resourceID { - return nil, fmt.Errorf("cannot reset `metadata.name` or `spec.resourceID` to %s, since it has already assigned to %s", - resourceID, actualResourceID) - } - id.External = externalRef - id.parent = &BigQueryDatasetParent{ProjectID: projectID} - return id, nil -} - -func (r *BigQueryDatasetRef) Parent() (*BigQueryDatasetParent, error) { - if r.parent != nil { - return r.parent, nil - } - if r.External != "" { - parent, _, err := ParseBigQueryDatasetExternal(r.External) - if err != nil { - return nil, err - } - return parent, nil - } - return nil, fmt.Errorf("BigQueryDatasetRef not initialized from `NewBigQueryDatasetRef` or `NormalizedExternal`") -} - -type BigQueryDatasetParent struct { - ProjectID string -} - -func (p *BigQueryDatasetParent) String() string { - return "projects/" + p.ProjectID -} - -func asBigQueryDatasetExternal(parent *BigQueryDatasetParent, resourceID string) (external string) { - // Link Reference https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/get - return parent.String() + "/datasets/" + resourceID -} - -func ParseBigQueryDatasetExternal(external string) (parent *BigQueryDatasetParent, resourceID string, err error) { - external = strings.TrimPrefix(external, "/") - tokens := strings.Split(external, "/") - if len(tokens) != 4 || tokens[0] != "projects" || tokens[2] != "datasets" { - return nil, "", fmt.Errorf("format of BigQueryDataset external=%q was not known (use projects/{{projectID}}/datasets/{{datasetID}})", external) - } - parent = &BigQueryDatasetParent{ - ProjectID: tokens[1], - } - resourceID = tokens[3] - return parent, resourceID, nil -} - -func valueOf[T any](t *T) T { - var zeroVal T - if t == nil { - return zeroVal - } - return *t -} diff --git a/apis/bigquery/v1beta1/zz_generated.deepcopy.go b/apis/bigquery/v1beta1/zz_generated.deepcopy.go index 81e55103cb..f4c32c24e0 100644 --- 
a/apis/bigquery/v1beta1/zz_generated.deepcopy.go +++ b/apis/bigquery/v1beta1/zz_generated.deepcopy.go @@ -163,41 +163,6 @@ func (in *BigQueryDatasetObservedState) DeepCopy() *BigQueryDatasetObservedState return out } -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BigQueryDatasetParent) DeepCopyInto(out *BigQueryDatasetParent) { - *out = *in -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDatasetParent. -func (in *BigQueryDatasetParent) DeepCopy() *BigQueryDatasetParent { - if in == nil { - return nil - } - out := new(BigQueryDatasetParent) - in.DeepCopyInto(out) - return out -} - -// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. -func (in *BigQueryDatasetRef) DeepCopyInto(out *BigQueryDatasetRef) { - *out = *in - if in.parent != nil { - in, out := &in.parent, &out.parent - *out = new(BigQueryDatasetParent) - **out = **in - } -} - -// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BigQueryDatasetRef. -func (in *BigQueryDatasetRef) DeepCopy() *BigQueryDatasetRef { - if in == nil { - return nil - } - out := new(BigQueryDatasetRef) - in.DeepCopyInto(out) - return out -} - // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *BigQueryDatasetSpec) DeepCopyInto(out *BigQueryDatasetSpec) { *out = *in @@ -380,6 +345,56 @@ func (in *DatasetAccessEntry) DeepCopy() *DatasetAccessEntry { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *DatasetIdentity) DeepCopyInto(out *DatasetIdentity) { + *out = *in + if in.parent != nil { + in, out := &in.parent, &out.parent + *out = new(DatasetParent) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetIdentity. +func (in *DatasetIdentity) DeepCopy() *DatasetIdentity { + if in == nil { + return nil + } + out := new(DatasetIdentity) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DatasetParent) DeepCopyInto(out *DatasetParent) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetParent. +func (in *DatasetParent) DeepCopy() *DatasetParent { + if in == nil { + return nil + } + out := new(DatasetParent) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DatasetRef) DeepCopyInto(out *DatasetRef) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DatasetRef. +func (in *DatasetRef) DeepCopy() *DatasetRef { + if in == nil { + return nil + } + out := new(DatasetRef) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *DatasetReference) DeepCopyInto(out *DatasetReference) { *out = *in diff --git a/pkg/controller/direct/bigquerydataset/dataset_controller.go b/pkg/controller/direct/bigquerydataset/dataset_controller.go index b2f837d148..5223e054aa 100644 --- a/pkg/controller/direct/bigquerydataset/dataset_controller.go +++ b/pkg/controller/direct/bigquerydataset/dataset_controller.go @@ -75,7 +75,7 @@ func (m *model) AdapterForObject(ctx context.Context, reader client.Reader, u *u return nil, fmt.Errorf("error converting to %T: %w", obj, err) } - id, err := krm.NewBigQueryDatasetRef(ctx, reader, obj) + id, err := krm.NewDatasetIdentity(ctx, reader, obj) if err != nil { return nil, err } @@ -99,7 +99,7 @@ func (m *model) AdapterForURL(ctx context.Context, url string) (directbase.Adapt } type Adapter struct { - id *krm.BigQueryDatasetRef + id *krm.DatasetIdentity gcpService *api.Service desired *krm.BigQueryDataset actual *api.Dataset @@ -110,19 +110,15 @@ var _ directbase.Adapter = &Adapter{} func (a *Adapter) Find(ctx context.Context) (bool, error) { log := klog.FromContext(ctx).WithName(ctrlName) - log.V(2).Info("getting BigQueryDataset", "name", a.id.External) + log.V(2).Info("getting BigQueryDataset", "name", a.id.String()) - parent, datasetId, err := krm.ParseBigQueryDatasetExternal(a.id.External) - if err != nil { - return false, fmt.Errorf("failed to parse bigquery dataset full name, %w", err) - } - datasetGetCall := a.gcpService.Datasets.Get(parent.ProjectID, datasetId) + datasetGetCall := a.gcpService.Datasets.Get(a.id.Parent().ProjectID, a.id.ID()) datasetpb, err := datasetGetCall.Do() if err != nil { if direct.IsNotFound(err) { return false, nil } - return false, fmt.Errorf("getting BigQueryDataset %q: %w", a.id.External, err) + return false, fmt.Errorf("getting BigQueryDataset %q: %w", a.id.String(), err) } a.actual = datasetpb return true, nil @@ -131,7 +127,7 @@ func (a *Adapter) Find(ctx context.Context) (bool, error) { func (a *Adapter) Create(ctx 
context.Context, createOp *directbase.CreateOperation) error { log := klog.FromContext(ctx).WithName(ctrlName) - log.V(2).Info("creating Dataset", "name", a.id.External) + log.V(2).Info("creating Dataset", "name", a.id.String()) mapCtx := &direct.MapContext{} desiredDataset := BigQueryDatasetSpec_ToAPI(mapCtx, &a.desired.Spec, a.desired.Name) @@ -140,10 +136,7 @@ func (a *Adapter) Create(ctx context.Context, createOp *directbase.CreateOperati desiredDataset.Labels[k] = v } desiredDataset.Labels["managed-by-cnrm"] = "true" - parent, _, err := krm.ParseBigQueryDatasetExternal(a.id.External) - if err != nil { - return fmt.Errorf("failed to parse bigquery dataset full name, %w", err) - } + // Resolve KMS key reference if a.desired.Spec.DefaultEncryptionConfiguration != nil { kmsRef, err := refs.ResolveKMSCryptoKeyRef(ctx, a.reader, a.desired, a.desired.Spec.DefaultEncryptionConfiguration.KmsKeyRef) @@ -152,19 +145,21 @@ func (a *Adapter) Create(ctx context.Context, createOp *directbase.CreateOperati } desiredDataset.DefaultEncryptionConfiguration.KmsKeyName = kmsRef.External } - insertDatasetCall := a.gcpService.Datasets.Insert(parent.ProjectID, desiredDataset) + insertDatasetCall := a.gcpService.Datasets.Insert(a.id.Parent().ProjectID, desiredDataset) inserted, err := insertDatasetCall.Do() if err != nil { - return fmt.Errorf("inserting Dataset %s: %w", a.id.External, err) + return fmt.Errorf("inserting Dataset %s: %w", a.id.String(), err) } - log.V(2).Info("successfully inserted Dataset", "name", a.id.External) + log.V(2).Info("successfully inserted Dataset", "name", a.id.String()) status := &krm.BigQueryDatasetStatus{} status = BigQueryDatasetStatus_FromAPI(mapCtx, inserted) if mapCtx.Err() != nil { return mapCtx.Err() } - status.ExternalRef = &a.id.External + + external := a.id.String() + status.ExternalRef = &external return createOp.UpdateStatus(ctx, status, nil) } @@ -172,7 +167,7 @@ func (a *Adapter) Update(ctx context.Context, updateOp 
*directbase.UpdateOperati u := updateOp.GetUnstructured() log := klog.FromContext(ctx).WithName(ctrlName) - log.V(2).Info("updating Dataset", "name", a.id.External) + log.V(2).Info("updating Dataset", "name", a.id.String()) mapCtx := &direct.MapContext{} // Convert KRM object to proto message @@ -245,20 +240,16 @@ func (a *Adapter) Update(ctx context.Context, updateOp *directbase.UpdateOperati if len(updateMask.Paths) == 0 { return nil } - parent, datasetId, err := krm.ParseBigQueryDatasetExternal(a.id.External) - if err != nil { - return fmt.Errorf("failed to parse bigquery dataset full name, %w", err) - } if desired.Access == nil || len(desired.Access) == 0 { resource.Access = a.actual.Access } - updateDatasetCall := a.gcpService.Datasets.Update(parent.ProjectID, datasetId, resource) + updateDatasetCall := a.gcpService.Datasets.Update(a.id.Parent().ProjectID, a.id.ID(), resource) updated, err := updateDatasetCall.Do() if err != nil { - return fmt.Errorf("updating Dataset %s: %w", a.id.External, err) + return fmt.Errorf("updating Dataset %s: %w", a.id.String(), err) } - log.V(2).Info("successfully updated Dataset", "name", a.id.External) + log.V(2).Info("successfully updated Dataset", "name", a.id.String()) status := &krm.BigQueryDatasetStatus{} status = BigQueryDatasetStatus_FromAPI(mapCtx, updated) @@ -280,12 +271,8 @@ func (a *Adapter) Export(ctx context.Context) (*unstructured.Unstructured, error if mapCtx.Err() != nil { return nil, mapCtx.Err() } - parent, _, err := krm.ParseBigQueryDatasetExternal(a.id.External) - if err != nil { - return nil, fmt.Errorf("failed to parse bigquery dataset full name, %w", err) - } - obj.Spec.ProjectRef = &refs.ProjectRef{Name: parent.ProjectID} + obj.Spec.ProjectRef = &refs.ProjectRef{Name: a.id.Parent().ProjectID} uObj, err := runtime.DefaultUnstructuredConverter.ToUnstructured(obj) if err != nil { return nil, err @@ -297,18 +284,14 @@ func (a *Adapter) Export(ctx context.Context) (*unstructured.Unstructured, error // Delete 
implements the Adapter interface. func (a *Adapter) Delete(ctx context.Context, deleteOp *directbase.DeleteOperation) (bool, error) { log := klog.FromContext(ctx).WithName(ctrlName) - log.V(2).Info("deleting Dataset", "name", a.id.External) + log.V(2).Info("deleting Dataset", "name", a.id.String()) - parent, datasetId, err := krm.ParseBigQueryDatasetExternal(a.id.External) - if err != nil { - return false, fmt.Errorf("failed to parse bigquery dataset full name, %w", err) - } - deleteDatasetCall := a.gcpService.Datasets.Delete(parent.ProjectID, datasetId) - err = deleteDatasetCall.Do() + deleteDatasetCall := a.gcpService.Datasets.Delete(a.id.Parent().ProjectID, a.id.ID()) + err := deleteDatasetCall.Do() if err != nil { - return false, fmt.Errorf("deleting Dataset %s: %w", a.id.External, err) + return false, fmt.Errorf("deleting Dataset %s: %w", a.id.String(), err) } - log.V(2).Info("successfully deleted Dataset", "name", a.id.External) + log.V(2).Info("successfully deleted Dataset", "name", a.id.String()) return true, nil } diff --git a/pkg/controller/direct/bigquerydataset/dataset_externalresource.go b/pkg/controller/direct/bigquerydataset/dataset_externalresource.go deleted file mode 100644 index 74ccbc2c9e..0000000000 --- a/pkg/controller/direct/bigquerydataset/dataset_externalresource.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2024 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package bigquerydataset - -import ( - krm "github.com/GoogleCloudPlatform/k8s-config-connector/apis/bigquery/v1beta1" -) - -// AsExternalRef builds a externalRef from a BigQueryDataTransferConfig -func AsExternalRef(datasetRef *krm.BigQueryDatasetRef) *string { - e := serviceDomain + "/" + datasetRef.External - return &e -}