From e0dd1ebac8d9a50d119385893be68c853f258a46 Mon Sep 17 00:00:00 2001 From: Nicolas Takashi Date: Tue, 16 Apr 2024 16:34:57 +0100 Subject: [PATCH] [refactor] alert reconciler Signed-off-by: Nicolas Takashi --- .github/workflows/build.yml | 2 +- .github/workflows/docs.yml | 2 +- .github/workflows/e2e-tests.yaml | 2 +- Makefile | 2 +- apis/coralogix/v1alpha1/alert_types.go | 38 +- .../alphacontrollers/alert_controller.go | 279 ++---- .../alphacontrollers/alert_controller_test.go | 852 ++++++++++++------ go.mod | 2 +- 8 files changed, 705 insertions(+), 474 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 48a4df2a..0d0dcbe6 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -22,6 +22,6 @@ jobs: - name: Install Go uses: actions/setup-go@37335c7bb261b353407cff977110895fa0b4f7d8 with: - go-version: 1.20.x + go-version: 1.22.x - name: Run build run: make build diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 087b064a..4f3f6381 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -21,7 +21,7 @@ jobs: - name: Install Go uses: actions/setup-go@37335c7bb261b353407cff977110895fa0b4f7d8 with: - go-version: 1.20.x + go-version: 1.22.x - name: Generate docs run: make generate-api-docs - name: Check for changes diff --git a/.github/workflows/e2e-tests.yaml b/.github/workflows/e2e-tests.yaml index 2d22fb39..eeff05c2 100644 --- a/.github/workflows/e2e-tests.yaml +++ b/.github/workflows/e2e-tests.yaml @@ -30,7 +30,7 @@ jobs: - name: Install Go uses: actions/setup-go@37335c7bb261b353407cff977110895fa0b4f7d8 with: - go-version: 1.20.x + go-version: 1.22.x - name: Running operator and Tests env: CORALOGIX_REGION: ${{ secrets.CORALOGIX_REGION }} diff --git a/Makefile b/Makefile index 9fa980f1..baf8fa82 100644 --- a/Makefile +++ b/Makefile @@ -140,7 +140,7 @@ CRDOC ?= $(LOCALBIN)/crdoc ## Tool Versions KUSTOMIZE_VERSION ?= v3.8.7 -CONTROLLER_TOOLS_VERSION ?= v0.9.2 +CONTROLLER_TOOLS_VERSION ?= v0.15.0 KUSTOMIZE_INSTALL_SCRIPT ?= "https://raw.githubusercontent.com/kubernetes-sigs/kustomize/master/hack/install_kustomize.sh" .PHONY: kustomize diff --git a/apis/coralogix/v1alpha1/alert_types.go b/apis/coralogix/v1alpha1/alert_types.go index 669c17d0..360bd9a2 100644 --- a/apis/coralogix/v1alpha1/alert_types.go +++ b/apis/coralogix/v1alpha1/alert_types.go @@ -153,36 +153,26 @@ type AlertSpec struct { AlertType AlertType `json:"alertType"` } -func (in *AlertSpec) ExtractCreateAlertRequest(ctx context.Context) (*alerts.CreateAlertRequest, error) { - enabled := wrapperspb.Bool(in.Active) - name := wrapperspb.String(in.Name) - description := wrapperspb.String(in.Description) - severity := AlertSchemaSeverityToProtoSeverity[in.Severity] - metaLabels := expandMetaLabels(in.Labels) - expirationDate := expandExpirationDate(in.ExpirationDate) - showInInsight := expandShowInInsight(in.ShowInInsight) - notificationGroups, err := expandNotificationGroups(ctx, in.NotificationGroups) +func (a *Alert) ExtractCreateAlertRequest(ctx context.Context) (*alerts.CreateAlertRequest, error) { + notificationGroups, err := expandNotificationGroups(ctx, a.Spec.NotificationGroups) if err != nil { return nil, err } - payloadFilters := utils.StringSliceToWrappedStringSlice(in.PayloadFilters) - activeWhen := expandActiveWhen(in.Scheduling) - alertTypeParams := expandAlertType(in.AlertType) return &alerts.CreateAlertRequest{ - Name: name, - Description: description, - IsActive: enabled, - Severity: severity, - MetaLabels: 
metaLabels, - Expiration: expirationDate, - ShowInInsight: showInInsight, + IsActive: wrapperspb.Bool(a.Spec.Active), + Name: wrapperspb.String(a.Spec.Name), + Description: wrapperspb.String(a.Spec.Description), + Severity: AlertSchemaSeverityToProtoSeverity[a.Spec.Severity], + MetaLabels: expandMetaLabels(a.Spec.Labels), + Expiration: expandExpirationDate(a.Spec.ExpirationDate), + ShowInInsight: expandShowInInsight(a.Spec.ShowInInsight), NotificationGroups: notificationGroups, - NotificationPayloadFilters: payloadFilters, - ActiveWhen: activeWhen, - Filters: alertTypeParams.filters, - Condition: alertTypeParams.condition, - TracingAlert: alertTypeParams.tracingAlert, + NotificationPayloadFilters: utils.StringSliceToWrappedStringSlice(a.Spec.PayloadFilters), + ActiveWhen: expandActiveWhen(a.Spec.Scheduling), + Filters: expandAlertType(a.Spec.AlertType).filters, + Condition: expandAlertType(a.Spec.AlertType).condition, + TracingAlert: expandAlertType(a.Spec.AlertType).tracingAlert, }, nil } diff --git a/controllers/alphacontrollers/alert_controller.go b/controllers/alphacontrollers/alert_controller.go index 4927d3f4..07fc4a85 100644 --- a/controllers/alphacontrollers/alert_controller.go +++ b/controllers/alphacontrollers/alert_controller.go @@ -25,7 +25,6 @@ import ( "time" "github.com/go-logr/logr" - "github.com/golang/protobuf/jsonpb" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/wrapperspb" @@ -35,7 +34,6 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client" "sigs.k8s.io/controller-runtime/pkg/controller/controllerutil" "sigs.k8s.io/controller-runtime/pkg/log" - "sigs.k8s.io/controller-runtime/pkg/reconcile" utils "github.com/coralogix/coralogix-operator/apis" coralogixv1alpha1 "github.com/coralogix/coralogix-operator/apis/coralogix/v1alpha1" @@ -54,7 +52,6 @@ var ( alertProtoNotifyOn = utils.ReverseMap(coralogixv1alpha1.AlertSchemaNotifyOnToProtoNotifyOn) alertProtoFlowOperatorToProtoFlowOperator = utils.ReverseMap(coralogixv1alpha1.AlertSchemaFlowOperatorToProtoFlowOperator) alertFinalizerName = "alert.coralogix.com/finalizer" - jsm = jsonpb.Marshaler{EmitDefaults: true} ) // AlertReconciler reconciles a Alert object @@ -68,216 +65,123 @@ type AlertReconciler struct { //+kubebuilder:rbac:groups=coralogix.com,resources=alerts/status,verbs=get;update;patch //+kubebuilder:rbac:groups=coralogix.com,resources=alerts/finalizers,verbs=update -// Reconcile is part of the main kubernetes reconciliation loop which aims to -// move the current state of the cluster closer to the desired state. -// TODO(user): Modify the Reconcile function to compare the state specified by -// the Alert object against the actual cluster state, and then -// perform operations to make the cluster state reflect the state specified by -// the user. 
-// -// For more details, check Reconcile and its Result here: -// - https://pkg.go.dev/sigs.k8s.io/controller-runtime@v0.14.4/pkg/reconcile +func (r *AlertReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { + var ( + resultError ctrl.Result = ctrl.Result{RequeueAfter: defaultErrRequeuePeriod} + resultOk ctrl.Result = ctrl.Result{RequeueAfter: defaultRequeuePeriod} + err error + ) -func (r *AlertReconciler) ReconcileV1(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { - log := log.FromContext(ctx) - jsm := &jsonpb.Marshaler{ - EmitDefaults: true, - } + log := log.FromContext(ctx).WithValues( + "alert", req.NamespacedName.Name, + "namespace", req.NamespacedName.Namespace, + ) - alertsClient := r.CoralogixClientSet.Alerts() coralogixv1alpha1.WebhooksClient = r.CoralogixClientSet.Webhooks() + alert := coralogixv1alpha1.NewAlert() - //Get alertCRD - alertCRD := &coralogixv1alpha1.Alert{} - if err := r.Client.Get(ctx, req.NamespacedName, alertCRD); err != nil { + if err = r.Client.Get(ctx, req.NamespacedName, alert); err != nil { if errors.IsNotFound(err) { // Request object not found, could have been deleted after reconcile request. // Owned objects are automatically garbage collected. For additional cleanup logic use finalizers. // Return and don't requeue return ctrl.Result{}, nil } - // Error reading the object - requeue the request - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err - } - - // examine DeletionTimestamp to determine if object is under deletion - if alertCRD.ObjectMeta.DeletionTimestamp.IsZero() { - // The object is not being deleted, so if it does not have our finalizer, - // then lets add the finalizer and update the object. This is equivalent - // registering our finalizer. - if !controllerutil.ContainsFinalizer(alertCRD, alertFinalizerName) { - controllerutil.AddFinalizer(alertCRD, alertFinalizerName) - if err := r.Update(ctx, alertCRD); err != nil { - log.Error(err, "Error on updating alert", "Name", alertCRD.Name, "Namespace", alertCRD.Namespace) - return ctrl.Result{}, err - } - } - } else { - // The object is being deleted - if controllerutil.ContainsFinalizer(alertCRD, alertFinalizerName) { - // our finalizer is present, so lets handle any external dependency - if alertCRD.Status.ID == nil { - controllerutil.RemoveFinalizer(alertCRD, alertFinalizerName) - err := r.Update(ctx, alertCRD) - return ctrl.Result{}, err - } - - alertId := *alertCRD.Status.ID - deleteAlertReq := &alerts.DeleteAlertByUniqueIdRequest{Id: wrapperspb.String(alertId)} - log.V(1).Info("Deleting Alert", "Alert ID", alertId) - if _, err := alertsClient.DeleteAlert(ctx, deleteAlertReq); err != nil { - // if fail to delete the external dependency here, return with error - // so that it can be retried - if status.Code(err) == codes.NotFound { - controllerutil.RemoveFinalizer(alertCRD, alertFinalizerName) - err := r.Update(ctx, alertCRD) - return ctrl.Result{}, err - } - - log.Error(err, "Received an error while Deleting a Alert", "Alert ID", alertId) - return ctrl.Result{}, err - } + return resultError, err + } - log.V(1).Info("Alert was deleted", "Alert ID", alertId) - // remove our finalizer from the list and update it. 
- controllerutil.RemoveFinalizer(alertCRD, alertFinalizerName) - if err := r.Update(ctx, alertCRD); err != nil { - log.Error(err, "Error on updating alert", "Name", alertCRD.Name, "Namespace", alertCRD.Namespace) - return ctrl.Result{}, err - } + if alert.Status.ID == nil { + err = r.create(ctx, log, alert) + if err != nil { + log.Error(err, "Error on creating alert") + return resultError, err } - - // Stop reconciliation as the item is being deleted - return ctrl.Result{}, nil + return resultOk, nil } - var ( - notFound bool - err error - actualState *coralogixv1alpha1.AlertStatus - ) - if id := alertCRD.Status.ID; id == nil { - log.V(1).Info("alert wasn't created") - notFound = true - } else { - getAlertResp, err := alertsClient.GetAlert(ctx, &alerts.GetAlertByUniqueIdRequest{Id: wrapperspb.String(*id)}) - switch { - case status.Code(err) == codes.NotFound: - log.V(1).Info("alert doesn't exist in Coralogix backend", "ID", id) - notFound = true - case err != nil: - log.Error(err, "Received an error while getting Alert") - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err - case err == nil: - actualState, err = getStatus(ctx, getAlertResp.GetAlert(), alertCRD.Spec) - if err != nil { - log.Error(err, "Received an error while flattened Alert") - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err - } + if !alert.ObjectMeta.DeletionTimestamp.IsZero() { + err = r.delete(ctx, log, alert) + if err != nil { + log.Error(err, "Error on deleting alert") + return resultError, err } + return resultOk, nil } - if notFound { - if alertCRD.Spec.Labels == nil { - alertCRD.Spec.Labels = make(map[string]string) - } - alertCRD.Spec.Labels["managed-by"] = "coralogix-operator" - if err := r.Client.Update(ctx, alertCRD); err != nil { - log.Error(err, "Error on updating alert", "Name", alertCRD.Name, "Namespace", alertCRD.Namespace) - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err - } + err = r.update(ctx, log, alert) + if err != nil { + log.Error(err, "Error on updating alert") + return resultError, err + } - createAlertReq, err := alertCRD.Spec.ExtractCreateAlertRequest(ctx) - if err != nil { - log.Error(err, "Bad request for creating alert", "Name", alertCRD.Name, "Namespace", alertCRD.Namespace) - return ctrl.Result{}, err - } + return resultOk, nil +} - jstr, _ := jsm.MarshalToString(createAlertReq) - log.V(1).Info("Creating Alert", "alert", jstr) - if createAlertResp, err := alertsClient.CreateAlert(ctx, createAlertReq); err == nil { - jstr, _ = jsm.MarshalToString(createAlertResp) - log.V(1).Info("Alert was created", "alert", jstr) - - //To avoid a situation of the operator falling between the creation of the alert in coralogix and being saved in the cluster (something that would cause it to be created again and again), its id will be saved ASAP. 
- id := createAlertResp.GetAlert().GetUniqueIdentifier().GetValue() - alertCRD.Status = coralogixv1alpha1.AlertStatus{ID: &id} - if err := r.Status().Update(ctx, alertCRD); err != nil { - log.Error(err, "Error on updating alert status", "Name", alertCRD.Name, "Namespace", alertCRD.Namespace) - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err - } +func (r *AlertReconciler) update(ctx context.Context, + log logr.Logger, + alert *coralogixv1alpha1.Alert) error { + remoteAlert, err := r.CoralogixClientSet.Alerts().GetAlert(ctx, &alerts.GetAlertByUniqueIdRequest{ + Id: wrapperspb.String(*alert.Status.ID), + }) - actualState, err = getStatus(ctx, createAlertResp.GetAlert(), alertCRD.Spec) - if err != nil { - log.Error(err, "Received an error while flattened Alert") - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err - } - alertCRD.Status = *actualState - if err := r.Status().Update(ctx, alertCRD); err != nil { - log.Error(err, "Error on updating alert status", "Name", alertCRD.Name, "Namespace", alertCRD.Namespace) - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err - } - return ctrl.Result{RequeueAfter: defaultRequeuePeriod}, nil - } else { - log.Error(err, "Received an error while creating Alert", "Crating request", jstr) - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err - } - } else if err != nil { - log.Error(err, "Received an error while reading Alert", "alert ID", *alertCRD.Status.ID) - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err + if err != nil { + log.Error(err, "Error on getting alert") + return err } - if equal, diff := alertCRD.Spec.DeepEqual(actualState); !equal { - log.V(1).Info("Find diffs between spec and the actual state", "Diff", diff) - updateAlertReq, err := alertCRD.Spec.ExtractUpdateAlertRequest(ctx, *alertCRD.Status.ID) - if err != nil { - log.Error(err, "Bad request for updating alert", "Name", alertCRD.Name, "Namespace", alertCRD.Namespace) - return ctrl.Result{}, err - } + status, err := getStatus(ctx, remoteAlert.GetAlert(), alert.Spec) + if err != nil { + log.Error(err, "Error on flattening alert") + return err + } - updateAlertResp, err := alertsClient.UpdateAlert(ctx, updateAlertReq) - if err != nil { - log.Error(err, "Received an error while updating a Alert", "alert", updateAlertReq) - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err - } - jstr, _ := jsm.MarshalToString(updateAlertResp) - log.V(1).Info("Alert was updated", "alert", jstr) + equal, _ := alert.Spec.DeepEqual(&status) + if equal { + return nil + } + + alertRequest, err := alert.Spec.ExtractUpdateAlertRequest(ctx, *alert.Status.ID) + if err != nil { + log.Error(err, "Error to parse alert request") + return err } - return ctrl.Result{RequeueAfter: defaultRequeuePeriod}, nil + _, err = r.CoralogixClientSet.Alerts().UpdateAlert(ctx, alertRequest) + if err != nil { + log.Error(err, "Error on remote updating alert") + return err + } + + return nil } -func (r *AlertReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { - log := log.FromContext(ctx) +func (r *AlertReconciler) delete(ctx context.Context, + log logr.Logger, + alert *coralogixv1alpha1.Alert) error { - alertsClient := r.CoralogixClientSet.Alerts() - coralogixv1alpha1.WebhooksClient = r.CoralogixClientSet.Webhooks() - return r.ReconcileV1(ctx, req) + _, err := r.CoralogixClientSet.Alerts().DeleteAlert(ctx, &alerts.DeleteAlertByUniqueIdRequest{ + Id: wrapperspb.String(*alert.Status.ID), + }) - alert := coralogixv1alpha1.NewAlert() - 
if err := r.Client.Get(ctx, req.NamespacedName, alert); err != nil { - if errors.IsNotFound(err) { - // Request object not found, could have been deleted after reconcile request. - // Owned objects are automatically garbage collected. For additional cleanup logic use finalizers. - // Return and don't requeue - return ctrl.Result{}, nil - } - // Error reading the object - requeue the request - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err + if err != nil && status.Code(err) != codes.NotFound { + log.Error(err, "Error on deleting alert") + return err } - if alert.Status.ID == nil { - r.create(ctx, log, alertsClient, alert) + controllerutil.RemoveFinalizer(alert, alertFinalizerName) + err = r.Update(ctx, alert) + if err != nil { + log.Error(err, "Error on updating alert after deletion") + return err } - return ctrl.Result{RequeueAfter: defaultRequeuePeriod}, nil + return nil } func (r *AlertReconciler) create( ctx context.Context, log logr.Logger, - alertClient clientset.AlertsClientInterface, - alert *coralogixv1alpha1.Alert) (reconcile.Result, error) { + alert *coralogixv1alpha1.Alert) error { if alert.Spec.Labels == nil { alert.Spec.Labels = make(map[string]string) @@ -287,28 +191,28 @@ func (r *AlertReconciler) create( alert.Spec.Labels["managed-by"] = "coralogix-operator" } - alertRequest, err := alert.Spec.ExtractCreateAlertRequest(ctx) + alertRequest, err := alert.ExtractCreateAlertRequest(ctx) if err != nil { - log.Error(err, "Bad request for creating alert", "Name", alert.Name, "Namespace", alert.Namespace) - return ctrl.Result{}, err + log.Error(err, "Error to create alert request") + return err } - response, err := alertClient.CreateAlert(ctx, alertRequest) + response, err := r.CoralogixClientSet.Alerts().CreateAlert(ctx, alertRequest) if err != nil { - log.Error(err, "Received an error while creating Alert", "Crating request") - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err + log.Error(err, "Received an error while creating Alert") + return err } status, err := getStatus(ctx, response.GetAlert(), alert.Spec) if err != nil { log.Error(err, "Received an error while flattened Alert") - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err + return err } alert.Status = status if err := r.Status().Update(ctx, alert); err != nil { - log.Error(err, "Error on updating alert status", "Name", alert.Name, "Namespace", alert.Namespace) - return ctrl.Result{RequeueAfter: defaultErrRequeuePeriod}, err + log.Error(err, "Error on updating alert status") + return err } if !controllerutil.ContainsFinalizer(alert, alertFinalizerName) { @@ -316,11 +220,11 @@ func (r *AlertReconciler) create( } if err := r.Client.Update(ctx, alert); err != nil { - log.Error(err, "Error on updating alert", "Name", alert.Name, "Namespace", alert.Namespace) - return ctrl.Result{}, err + log.Error(err, "Error on updating alert") + return err } - return reconcile.Result{}, nil + return nil } func getStatus(ctx context.Context, actualAlert *alerts.Alert, spec coralogixv1alpha1.AlertSpec) (coralogixv1alpha1.AlertStatus, error) { @@ -1087,6 +991,7 @@ func flattenNotificationGroups(ctx context.Context, notificationGroups []*alerts notificationGroup, flattenErr := flattenNotificationGroup(ctx, ng) if err != nil { err = stdErr.Join(err, fmt.Errorf("error on flatten notification-groups - %w", flattenErr)) + continue } result = append(result, *notificationGroup) } diff --git a/controllers/alphacontrollers/alert_controller_test.go b/controllers/alphacontrollers/alert_controller_test.go 
index a5515190..a74e404f 100644 --- a/controllers/alphacontrollers/alert_controller_test.go +++ b/controllers/alphacontrollers/alert_controller_test.go @@ -2,8 +2,6 @@ package alphacontrollers import ( "context" - "encoding/json" - "fmt" "testing" utils "github.com/coralogix/coralogix-operator/apis" @@ -13,10 +11,8 @@ import ( "github.com/stretchr/testify/assert" "go.uber.org/mock/gomock" "google.golang.org/protobuf/types/known/wrapperspb" - "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" - "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apimachinery/pkg/types" utilruntime "k8s.io/apimachinery/pkg/util/runtime" "k8s.io/apimachinery/pkg/watch" @@ -26,17 +22,93 @@ import ( "sigs.k8s.io/controller-runtime/pkg/log/zap" ) -var expectedAlertBackendSchema = &alerts.Alert{ - UniqueIdentifier: wrapperspb.String("id1"), - Name: wrapperspb.String("name"), - Description: wrapperspb.String("description"), - IsActive: wrapperspb.Bool(true), - Severity: alerts.AlertSeverity_ALERT_SEVERITY_CRITICAL, - MetaLabels: []*alerts.MetaLabel{ - {Key: wrapperspb.String("key"), Value: wrapperspb.String("value")}, - {Key: wrapperspb.String("managed-by"), Value: wrapperspb.String("coralogix-operator")}, - }, - Condition: &alerts.AlertCondition{ +func setupReconciler(t *testing.T, ctx context.Context, clientSet *mock_clientset.MockClientSetInterface) (AlertReconciler, watch.Interface) { + scheme := runtime.NewScheme() + utilruntime.Must(coralogixv1alpha1.AddToScheme(scheme)) + + mgr, _ := ctrl.NewManager(ctrl.GetConfigOrDie(), ctrl.Options{ + Scheme: scheme, + MetricsBindAddress: "0", + }) + + go mgr.GetCache().Start(ctx) + + mgr.GetCache().WaitForCacheSync(ctx) + withWatch, err := client.NewWithWatch(mgr.GetConfig(), client.Options{ + Scheme: mgr.GetScheme(), + }) + + assert.NoError(t, err) + r := AlertReconciler{ + Client: withWatch, + Scheme: mgr.GetScheme(), + CoralogixClientSet: clientSet, + } + r.SetupWithManager(mgr) + + watcher, _ := r.Client.(client.WithWatch).Watch(ctx, &coralogixv1alpha1.AlertList{}) + ctrl.SetLogger(zap.New(zap.UseDevMode(true))) + return r, watcher +} + +type PrepareParams struct { + ctx context.Context + clientSet *mock_clientset.MockClientSetInterface + alertsClient *mock_clientset.MockAlertsClientInterface + webhooksClient *mock_clientset.MockWebhooksClientInterface + alert *coralogixv1alpha1.Alert + remoteAlert *alerts.Alert +} + +func TestAlertCreation(t *testing.T) { + defaultNotificationGroups := []coralogixv1alpha1.NotificationGroup{ + { + Notifications: []coralogixv1alpha1.Notification{ + { + RetriggeringPeriodMinutes: 10, + NotifyOn: coralogixv1alpha1.NotifyOnTriggeredAndResolved, + EmailRecipients: []string{"example@coralogix.com"}, + }, + }, + }, + } + + defaultAlertType := coralogixv1alpha1.AlertType{ + Metric: &coralogixv1alpha1.Metric{ + Promql: &coralogixv1alpha1.Promql{ + SearchQuery: "http_requests_total{status!~\"4..\"}", + Conditions: coralogixv1alpha1.PromqlConditions{ + AlertWhen: "MoreThanUsual", + Threshold: utils.FloatToQuantity(3.0), + TimeWindow: "TwelveHours", + MinNonNullValuesPercentage: pointer.Int(10), + ReplaceMissingValueWithZero: false, + }, + }, + }, + } + + defaultRemoteNotificationGroups := []*alerts.AlertNotificationGroups{ + { + Notifications: []*alerts.AlertNotification{ + { + RetriggeringPeriodSeconds: wrapperspb.UInt32(600), + NotifyOn: func() *alerts.NotifyOn { + notifyOn := new(alerts.NotifyOn) + *notifyOn = alerts.NotifyOn_TRIGGERED_AND_RESOLVED + return notifyOn + }(), + 
IntegrationType: &alerts.AlertNotification_Recipients{ + Recipients: &alerts.Recipients{ + Emails: []*wrapperspb.StringValue{wrapperspb.String("example@coralogix.com")}, + }, + }, + }, + }, + }, + } + + defaultRemoteCondition := &alerts.AlertCondition{ Condition: &alerts.AlertCondition_MoreThanUsual{ MoreThanUsual: &alerts.MoreThanUsualCondition{ Parameters: &alerts.ConditionParameters{ @@ -51,8 +123,150 @@ var expectedAlertBackendSchema = &alerts.Alert{ }, }, }, - }, - NotificationGroups: []*alerts.AlertNotificationGroups{ + } + + tests := []struct { + name string + prepare func(params PrepareParams) + alert *coralogixv1alpha1.Alert + remoteAlert *alerts.Alert + shouldFail bool + }{ + { + name: "Alert creation success", + shouldFail: false, + alert: &coralogixv1alpha1.Alert{ + TypeMeta: metav1.TypeMeta{Kind: "Alert", APIVersion: "coralogix.com/v1alpha1"}, + ObjectMeta: metav1.ObjectMeta{Name: "alert-creation-success", Namespace: "default"}, + Spec: coralogixv1alpha1.AlertSpec{ + Name: "AlertCreationSuccess", + Description: "AlertCreationSuccess", + Active: true, + Severity: alertProtoSeverityToSchemaSeverity[alerts.AlertSeverity_ALERT_SEVERITY_CRITICAL], + NotificationGroups: defaultNotificationGroups, + PayloadFilters: []string{"filter"}, + AlertType: defaultAlertType, + }, + }, + remoteAlert: &alerts.Alert{ + UniqueIdentifier: wrapperspb.String("AlertCreationSuccess"), + Name: wrapperspb.String("AlertCreationSuccess"), + Description: wrapperspb.String("AlertCreationSuccess"), + IsActive: wrapperspb.Bool(true), + Severity: alerts.AlertSeverity_ALERT_SEVERITY_CRITICAL, + MetaLabels: []*alerts.MetaLabel{ + {Key: wrapperspb.String("key"), Value: wrapperspb.String("value")}, + {Key: wrapperspb.String("managed-by"), Value: wrapperspb.String("coralogix-operator")}, + }, + Condition: defaultRemoteCondition, + NotificationGroups: defaultRemoteNotificationGroups, + Filters: &alerts.AlertFilters{ + FilterType: alerts.AlertFilters_FILTER_TYPE_METRIC, + }, + NotificationPayloadFilters: []*wrapperspb.StringValue{wrapperspb.String("filter")}, + }, + prepare: func(params PrepareParams) { + + params.alertsClient.EXPECT(). + GetAlert(params.alert.Namespace, coralogixv1alpha1.NewAlert()). + Return(&alerts.GetAlertByUniqueIdResponse{Alert: params.remoteAlert}, nil). + MinTimes(1).MaxTimes(1) + + params.alertsClient.EXPECT().CreateAlert(params.ctx, gomock.Any()). + Return(&alerts.CreateAlertResponse{Alert: params.remoteAlert}, nil). + MinTimes(1).MaxTimes(1) + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + controller := gomock.NewController(t) + defer controller.Finish() + + // Creating client set. + clientSet := mock_clientset.NewMockClientSetInterface(controller) + + // Creating alerts client. + alertsClient := mock_clientset.NewMockAlertsClientInterface(controller) + + // Creating webhooks client. + webhooksClient := mock_clientset.NewMockWebhooksClientInterface(controller) + + // Preparing common mocks. 
+ clientSet.EXPECT().Alerts().MaxTimes(1).MinTimes(1).Return(alertsClient) + clientSet.EXPECT().Webhooks().Return(webhooksClient).AnyTimes() + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + if tt.prepare != nil { + tt.prepare(PrepareParams{ + ctx: ctx, + clientSet: clientSet, + alertsClient: alertsClient, + webhooksClient: webhooksClient, + alert: tt.alert, + remoteAlert: tt.remoteAlert, + }) + } + + reconciler, watcher := setupReconciler(t, ctx, clientSet) + + err := reconciler.Client.Create(ctx, tt.alert) + + assert.NoError(t, err) + + <-watcher.ResultChan() + + result, err := reconciler.Reconcile(ctx, ctrl.Request{ + NamespacedName: types.NamespacedName{ + Namespace: tt.alert.Namespace, + Name: tt.alert.Name, + }, + }) + + if tt.shouldFail { + assert.Error(t, err) + assert.Equal(t, defaultErrRequeuePeriod, result.RequeueAfter) + } else { + assert.NoError(t, err) + assert.Equal(t, defaultRequeuePeriod, result.RequeueAfter) + } + }) + } + +} + +func TestAlertUpdate(t *testing.T) { + defaultNotificationGroups := []coralogixv1alpha1.NotificationGroup{ + { + Notifications: []coralogixv1alpha1.Notification{ + { + RetriggeringPeriodMinutes: 10, + NotifyOn: coralogixv1alpha1.NotifyOnTriggeredAndResolved, + EmailRecipients: []string{"example@coralogix.com"}, + }, + }, + }, + } + + defaultAlertType := coralogixv1alpha1.AlertType{ + Metric: &coralogixv1alpha1.Metric{ + Promql: &coralogixv1alpha1.Promql{ + SearchQuery: "http_requests_total{status!~\"4..\"}", + Conditions: coralogixv1alpha1.PromqlConditions{ + AlertWhen: "MoreThanUsual", + Threshold: utils.FloatToQuantity(3.0), + TimeWindow: "TwelveHours", + MinNonNullValuesPercentage: pointer.Int(10), + ReplaceMissingValueWithZero: false, + }, + }, + }, + } + + defaultRemoteNotificationGroups := []*alerts.AlertNotificationGroups{ { Notifications: []*alerts.AlertNotification{ { @@ -70,51 +284,380 @@ var expectedAlertBackendSchema = &alerts.Alert{ }, }, }, - }, - Filters: &alerts.AlertFilters{ - FilterType: alerts.AlertFilters_FILTER_TYPE_METRIC, - }, - NotificationPayloadFilters: []*wrapperspb.StringValue{wrapperspb.String("filter")}, + } + + defaultRemoteCondition := &alerts.AlertCondition{ + Condition: &alerts.AlertCondition_MoreThanUsual{ + MoreThanUsual: &alerts.MoreThanUsualCondition{ + Parameters: &alerts.ConditionParameters{ + Threshold: wrapperspb.Double(3), + Timeframe: alerts.Timeframe_TIMEFRAME_12_H, + MetricAlertPromqlParameters: &alerts.MetricAlertPromqlConditionParameters{ + PromqlText: wrapperspb.String("http_requests_total{status!~\"4..\"}"), + NonNullPercentage: wrapperspb.UInt32(10), + SwapNullValues: wrapperspb.Bool(false), + }, + NotifyGroupByOnlyAlerts: wrapperspb.Bool(false), + }, + }, + }, + } + + tests := []struct { + name string + prepare func(params PrepareParams) + alert *coralogixv1alpha1.Alert + remoteAlert *alerts.Alert + shouldFail bool + }{ + { + name: "Alert update success", + shouldFail: false, + alert: &coralogixv1alpha1.Alert{ + TypeMeta: metav1.TypeMeta{Kind: "Alert", APIVersion: "coralogix.com/v1alpha1"}, + ObjectMeta: metav1.ObjectMeta{Name: "alert-update-success", Namespace: "default"}, + Spec: coralogixv1alpha1.AlertSpec{ + Name: "AlertUpdateSuccess", + Description: "AlertUpdateSuccess", + Active: true, + Severity: alertProtoSeverityToSchemaSeverity[alerts.AlertSeverity_ALERT_SEVERITY_CRITICAL], + NotificationGroups: defaultNotificationGroups, + PayloadFilters: []string{"filter"}, + AlertType: defaultAlertType, + }, + Status: coralogixv1alpha1.AlertStatus{ + ID: 
pointer.String("AlertUpdateSuccess"), + Name: "AlertUpdateSuccess", + Description: "AlertUpdateSuccess", + Active: true, + Severity: "Critical", + }, + }, + remoteAlert: &alerts.Alert{ + UniqueIdentifier: wrapperspb.String("AlertUpdateSuccess"), + Name: wrapperspb.String("AlertUpdateSuccess"), + Description: wrapperspb.String("AlertUpdateSuccess"), + IsActive: wrapperspb.Bool(true), + Severity: alerts.AlertSeverity_ALERT_SEVERITY_CRITICAL, + MetaLabels: []*alerts.MetaLabel{ + {Key: wrapperspb.String("key"), Value: wrapperspb.String("value")}, + {Key: wrapperspb.String("managed-by"), Value: wrapperspb.String("coralogix-operator")}, + }, + Condition: defaultRemoteCondition, + NotificationGroups: defaultRemoteNotificationGroups, + Filters: &alerts.AlertFilters{ + FilterType: alerts.AlertFilters_FILTER_TYPE_METRIC, + }, + NotificationPayloadFilters: []*wrapperspb.StringValue{wrapperspb.String("filter")}, + }, + prepare: func(params PrepareParams) { + params.alertsClient.EXPECT(). + GetAlert(params.alert.Namespace, coralogixv1alpha1.NewAlert()). + Return(&alerts.GetAlertByUniqueIdResponse{Alert: params.remoteAlert}, nil). + MinTimes(1).MaxTimes(1) + + params.alertsClient.EXPECT().CreateAlert(params.ctx, gomock.Any()). + Return(&alerts.CreateAlertResponse{Alert: params.remoteAlert}, nil). + MinTimes(1).MaxTimes(1) + + params.alertsClient.EXPECT().UpdateAlert(params.ctx, gomock.Any()). + Return(&alerts.UpdateAlertByUniqueIdResponse{Alert: params.remoteAlert}, nil). + MinTimes(1).MaxTimes(1) + + params.alertsClient.EXPECT().GetAlert(params.ctx, gomock.Any()). + Return(&alerts.GetAlertByUniqueIdResponse{Alert: params.remoteAlert}, nil). + MinTimes(1).MaxTimes(1) + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + controller := gomock.NewController(t) + defer controller.Finish() + + // Creating client set. + clientSet := mock_clientset.NewMockClientSetInterface(controller) + + // Creating alerts client. + alertsClient := mock_clientset.NewMockAlertsClientInterface(controller) + + // Creating webhooks client. + webhooksClient := mock_clientset.NewMockWebhooksClientInterface(controller) + + // Preparing common mocks. 
+ clientSet.EXPECT().Alerts().MaxTimes(1).MinTimes(1).Return(alertsClient) + clientSet.EXPECT().Webhooks().Return(webhooksClient).AnyTimes() + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + if tt.prepare != nil { + tt.prepare(PrepareParams{ + ctx: ctx, + clientSet: clientSet, + alertsClient: alertsClient, + webhooksClient: webhooksClient, + alert: tt.alert, + remoteAlert: tt.remoteAlert, + }) + } + + reconciler, watcher := setupReconciler(t, ctx, clientSet) + + err := reconciler.Client.Create(ctx, tt.alert) + assert.NoError(t, err) + + <-watcher.ResultChan() + + result, err := reconciler.Reconcile(ctx, ctrl.Request{ + NamespacedName: types.NamespacedName{ + Namespace: tt.alert.Namespace, + Name: tt.alert.Name, + }, + }) + assert.NoError(t, err) + + currentAlert := &coralogixv1alpha1.Alert{} + + err = reconciler.Get(ctx, types.NamespacedName{ + Namespace: tt.alert.Namespace, + Name: tt.alert.Name, + }, currentAlert) + + assert.NoError(t, err) + + err = reconciler.Client.Update(ctx, currentAlert) + assert.NoError(t, err) + + result, err = reconciler.Reconcile(ctx, ctrl.Request{ + NamespacedName: types.NamespacedName{ + Namespace: tt.alert.Namespace, + Name: tt.alert.Name, + }, + }) + + if tt.shouldFail { + assert.Error(t, err) + assert.Equal(t, defaultErrRequeuePeriod, result.RequeueAfter) + } else { + assert.NoError(t, err) + assert.Equal(t, defaultRequeuePeriod, result.RequeueAfter) + } + }) + } + } -func expectedAlertCRD() *coralogixv1alpha1.Alert { - return &coralogixv1alpha1.Alert{ - TypeMeta: metav1.TypeMeta{Kind: "Alert", APIVersion: "coralogix.com/v1alpha1"}, - ObjectMeta: metav1.ObjectMeta{Name: "test", Namespace: "default"}, - Spec: coralogixv1alpha1.AlertSpec{ - Name: expectedAlertBackendSchema.GetName().GetValue(), - Description: expectedAlertBackendSchema.GetDescription().GetValue(), - Active: expectedAlertBackendSchema.GetIsActive().GetValue(), - Severity: alertProtoSeverityToSchemaSeverity[expectedAlertBackendSchema.GetSeverity()], - Labels: map[string]string{"key": "value", "managed-by": "coralogix-operator"}, - NotificationGroups: []coralogixv1alpha1.NotificationGroup{ +func TestAlertDelete(t *testing.T) { + defaultNotificationGroups := []coralogixv1alpha1.NotificationGroup{ + { + Notifications: []coralogixv1alpha1.Notification{ { - Notifications: []coralogixv1alpha1.Notification{ - { - RetriggeringPeriodMinutes: 10, - NotifyOn: coralogixv1alpha1.NotifyOnTriggeredAndResolved, - EmailRecipients: []string{"example@coralogix.com"}, + RetriggeringPeriodMinutes: 10, + NotifyOn: coralogixv1alpha1.NotifyOnTriggeredAndResolved, + EmailRecipients: []string{"example@coralogix.com"}, + }, + }, + }, + } + + defaultAlertType := coralogixv1alpha1.AlertType{ + Metric: &coralogixv1alpha1.Metric{ + Promql: &coralogixv1alpha1.Promql{ + SearchQuery: "http_requests_total{status!~\"4..\"}", + Conditions: coralogixv1alpha1.PromqlConditions{ + AlertWhen: "MoreThanUsual", + Threshold: utils.FloatToQuantity(3.0), + TimeWindow: "TwelveHours", + MinNonNullValuesPercentage: pointer.Int(10), + ReplaceMissingValueWithZero: false, + }, + }, + }, + } + + defaultRemoteNotificationGroups := []*alerts.AlertNotificationGroups{ + { + Notifications: []*alerts.AlertNotification{ + { + RetriggeringPeriodSeconds: wrapperspb.UInt32(600), + NotifyOn: func() *alerts.NotifyOn { + notifyOn := new(alerts.NotifyOn) + *notifyOn = alerts.NotifyOn_TRIGGERED_AND_RESOLVED + return notifyOn + }(), + IntegrationType: &alerts.AlertNotification_Recipients{ + Recipients: &alerts.Recipients{ + Emails: 
[]*wrapperspb.StringValue{wrapperspb.String("example@coralogix.com")}, }, }, }, }, - PayloadFilters: []string{"filter"}, - AlertType: coralogixv1alpha1.AlertType{ - Metric: &coralogixv1alpha1.Metric{ - Promql: &coralogixv1alpha1.Promql{ - SearchQuery: "http_requests_total{status!~\"4..\"}", - Conditions: coralogixv1alpha1.PromqlConditions{ - AlertWhen: "MoreThanUsual", - Threshold: utils.FloatToQuantity(3.0), - TimeWindow: "TwelveHours", - MinNonNullValuesPercentage: pointer.Int(10), - ReplaceMissingValueWithZero: false, - }, + }, + } + + defaultRemoteCondition := &alerts.AlertCondition{ + Condition: &alerts.AlertCondition_MoreThanUsual{ + MoreThanUsual: &alerts.MoreThanUsualCondition{ + Parameters: &alerts.ConditionParameters{ + Threshold: wrapperspb.Double(3), + Timeframe: alerts.Timeframe_TIMEFRAME_12_H, + MetricAlertPromqlParameters: &alerts.MetricAlertPromqlConditionParameters{ + PromqlText: wrapperspb.String("http_requests_total{status!~\"4..\"}"), + NonNullPercentage: wrapperspb.UInt32(10), + SwapNullValues: wrapperspb.Bool(false), }, + NotifyGroupByOnlyAlerts: wrapperspb.Bool(false), + }, + }, + }, + } + + tests := []struct { + name string + prepare func(params PrepareParams) + alert *coralogixv1alpha1.Alert + remoteAlert *alerts.Alert + shouldFail bool + }{ + { + name: "Alert delete success", + shouldFail: false, + alert: &coralogixv1alpha1.Alert{ + TypeMeta: metav1.TypeMeta{Kind: "Alert", APIVersion: "coralogix.com/v1alpha1"}, + ObjectMeta: metav1.ObjectMeta{Name: "alert-delete-success", Namespace: "default"}, + Spec: coralogixv1alpha1.AlertSpec{ + Name: "AlertDeleteSuccess", + Description: "AlertDeleteSuccess", + Active: true, + Severity: alertProtoSeverityToSchemaSeverity[alerts.AlertSeverity_ALERT_SEVERITY_CRITICAL], + NotificationGroups: defaultNotificationGroups, + PayloadFilters: []string{"filter"}, + AlertType: defaultAlertType, }, + Status: coralogixv1alpha1.AlertStatus{ + ID: pointer.String("AlertDeleteSuccess"), + Name: "AlertDeleteSuccess", + Description: "AlertDeleteSuccess", + Active: true, + Severity: "Critical", + }, + }, + remoteAlert: &alerts.Alert{ + UniqueIdentifier: wrapperspb.String("AlertDeleteSuccess"), + Name: wrapperspb.String("AlertDeleteSuccess"), + Description: wrapperspb.String("AlertDeleteSuccess"), + IsActive: wrapperspb.Bool(true), + Severity: alerts.AlertSeverity_ALERT_SEVERITY_CRITICAL, + MetaLabels: []*alerts.MetaLabel{ + {Key: wrapperspb.String("key"), Value: wrapperspb.String("value")}, + {Key: wrapperspb.String("managed-by"), Value: wrapperspb.String("coralogix-operator")}, + }, + Condition: defaultRemoteCondition, + NotificationGroups: defaultRemoteNotificationGroups, + Filters: &alerts.AlertFilters{ + FilterType: alerts.AlertFilters_FILTER_TYPE_METRIC, + }, + NotificationPayloadFilters: []*wrapperspb.StringValue{wrapperspb.String("filter")}, + }, + prepare: func(params PrepareParams) { + params.alertsClient.EXPECT(). + GetAlert(params.alert.Namespace, coralogixv1alpha1.NewAlert()). + Return(&alerts.GetAlertByUniqueIdResponse{Alert: params.remoteAlert}, nil). + MinTimes(1).MaxTimes(1) + + params.alertsClient.EXPECT().CreateAlert(params.ctx, gomock.Any()). + Return(&alerts.CreateAlertResponse{Alert: params.remoteAlert}, nil). + MinTimes(1).MaxTimes(1) + + params.alertsClient.EXPECT().DeleteAlert(params.ctx, gomock.Any()). + Return(&alerts.DeleteAlertByUniqueIdResponse{}, nil). + MinTimes(1).MaxTimes(1) + + params.alertsClient.EXPECT().GetAlert(params.ctx, gomock.Any()). 
+ Return(&alerts.GetAlertByUniqueIdResponse{Alert: params.remoteAlert}, nil). + MinTimes(1).MaxTimes(1) }, }, } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + controller := gomock.NewController(t) + defer controller.Finish() + + // Creating client set. + clientSet := mock_clientset.NewMockClientSetInterface(controller) + + // Creating alerts client. + alertsClient := mock_clientset.NewMockAlertsClientInterface(controller) + + // Creating webhooks client. + webhooksClient := mock_clientset.NewMockWebhooksClientInterface(controller) + + // Preparing common mocks. + clientSet.EXPECT().Alerts().MaxTimes(1).MinTimes(1).Return(alertsClient) + clientSet.EXPECT().Webhooks().Return(webhooksClient).AnyTimes() + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + if tt.prepare != nil { + tt.prepare(PrepareParams{ + ctx: ctx, + clientSet: clientSet, + alertsClient: alertsClient, + webhooksClient: webhooksClient, + alert: tt.alert, + remoteAlert: tt.remoteAlert, + }) + } + + reconciler, watcher := setupReconciler(t, ctx, clientSet) + + err := reconciler.Client.Create(ctx, tt.alert) + assert.NoError(t, err) + + <-watcher.ResultChan() + + result, err := reconciler.Reconcile(ctx, ctrl.Request{ + NamespacedName: types.NamespacedName{ + Namespace: tt.alert.Namespace, + Name: tt.alert.Name, + }, + }) + assert.NoError(t, err) + + currentAlert := &coralogixv1alpha1.Alert{} + + err = reconciler.Get(ctx, types.NamespacedName{ + Namespace: tt.alert.Namespace, + Name: tt.alert.Name, + }, currentAlert) + + assert.NoError(t, err) + + err = reconciler.Client.Delete(ctx, currentAlert) + assert.NoError(t, err) + + result, err = reconciler.Reconcile(ctx, ctrl.Request{ + NamespacedName: types.NamespacedName{ + Namespace: tt.alert.Namespace, + Name: tt.alert.Name, + }, + }) + + if tt.shouldFail { + assert.Error(t, err) + assert.Equal(t, defaultErrRequeuePeriod, result.RequeueAfter) + } else { + assert.NoError(t, err) + assert.Equal(t, defaultRequeuePeriod, result.RequeueAfter) + } + }) + } + } func TestFlattenAlerts(t *testing.T) { @@ -179,212 +722,5 @@ func TestFlattenAlerts(t *testing.T) { PayloadFilters: []string{}, } - assert.EqualValues(t, expected, status) -} - -func TestAlertReconciler_Reconcile(t *testing.T) { - mockCtrl := gomock.NewController(t) - - mockAlertsClient := createSimpleMockAlertsClient(mockCtrl, expectedAlertBackendSchema) - mockWebhooksClient := createSimpleWebhooksClient(mockCtrl) - mockClientSet := mock_clientset.NewMockClientSetInterface(mockCtrl) - mockClientSet.EXPECT().Alerts().Return(mockAlertsClient).AnyTimes() - mockClientSet.EXPECT().Webhooks().Return(mockWebhooksClient).AnyTimes() - - scheme := runtime.NewScheme() - utilruntime.Must(coralogixv1alpha1.AddToScheme(scheme)) - mgr, _ := ctrl.NewManager(ctrl.GetConfigOrDie(), ctrl.Options{ - Scheme: scheme, - MetricsBindAddress: "0", - }) - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - go mgr.GetCache().Start(ctx) - mgr.GetCache().WaitForCacheSync(ctx) - withWatch, err := client.NewWithWatch(mgr.GetConfig(), client.Options{ - Scheme: mgr.GetScheme(), - }) - assert.NoError(t, err) - r := AlertReconciler{ - Client: withWatch, - Scheme: mgr.GetScheme(), - CoralogixClientSet: mockClientSet, - } - r.SetupWithManager(mgr) - - watcher, _ := r.Client.(client.WithWatch).Watch(ctx, &coralogixv1alpha1.AlertList{}) - ctrl.SetLogger(zap.New(zap.UseDevMode(true))) - - err = r.Client.Create(ctx, expectedAlertCRD()) - assert.NoError(t, err) - <-watcher.ResultChan() - - result, err 
:= r.Reconcile(ctx, ctrl.Request{NamespacedName: types.NamespacedName{Namespace: "default", Name: "test"}}) - assert.NoError(t, err) - assert.Equal(t, defaultRequeuePeriod, result.RequeueAfter) - - namespacedName := types.NamespacedName{Namespace: "default", Name: "test"} - actualAlertCRD := &coralogixv1alpha1.Alert{} - err = r.Client.Get(ctx, namespacedName, actualAlertCRD) - assert.NoError(t, err) - - id := actualAlertCRD.Status.ID - if !assert.NotNil(t, id) { - return - } - getAlertRequest := &alerts.GetAlertByUniqueIdRequest{Id: wrapperspb.String(*id)} - alert, err := r.CoralogixClientSet.Alerts().GetAlert(ctx, getAlertRequest) - assert.NoError(t, err) - assert.EqualValues(t, expectedAlertBackendSchema, alert.GetAlert()) - - err = r.Client.Delete(ctx, actualAlertCRD) - <-watcher.ResultChan() - - result, err = r.Reconcile(ctx, ctrl.Request{NamespacedName: types.NamespacedName{Namespace: "default", Name: "test"}}) - assert.NoError(t, err) - assert.Equal(t, false, result.Requeue) - - alert, err = r.CoralogixClientSet.Alerts().GetAlert(ctx, getAlertRequest) - assert.Nil(t, alert) - assert.Error(t, err) -} - -func TestAlertReconciler_Reconcile_5XX_StatusError(t *testing.T) { - mockCtrl := gomock.NewController(t) - - mockAlertsClient := createMockAlertsClientWith5XXStatusError(mockCtrl, expectedAlertBackendSchema) - mockWebhooksClient := createSimpleWebhooksClient(mockCtrl) - mockClientSet := mock_clientset.NewMockClientSetInterface(mockCtrl) - mockClientSet.EXPECT().Alerts().Return(mockAlertsClient).AnyTimes() - mockClientSet.EXPECT().Webhooks().Return(mockWebhooksClient).AnyTimes() - - scheme := runtime.NewScheme() - utilruntime.Must(coralogixv1alpha1.AddToScheme(scheme)) - mgr, _ := ctrl.NewManager(ctrl.GetConfigOrDie(), ctrl.Options{ - Scheme: scheme, - MetricsBindAddress: "0", - }) - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() - go mgr.GetCache().Start(ctx) - mgr.GetCache().WaitForCacheSync(ctx) - withWatch, err := client.NewWithWatch(mgr.GetConfig(), client.Options{ - Scheme: mgr.GetScheme(), - }) - assert.NoError(t, err) - r := AlertReconciler{ - Client: withWatch, - Scheme: mgr.GetScheme(), - CoralogixClientSet: mockClientSet, - } - r.SetupWithManager(mgr) - - watcher, _ := r.Client.(client.WithWatch).Watch(ctx, &coralogixv1alpha1.AlertList{}) - ctrl.SetLogger(zap.New(zap.UseDevMode(true))) - - err = r.Client.Create(ctx, expectedAlertCRD()) - assert.NoError(t, err) - event := <-watcher.ResultChan() - assert.Equal(t, watch.Added, event.Type) - - result, err := r.Reconcile(ctx, ctrl.Request{NamespacedName: types.NamespacedName{Namespace: "default", Name: "test"}}) - assert.Error(t, err) - assert.Equal(t, defaultErrRequeuePeriod, result.RequeueAfter) - - result, err = r.Reconcile(ctx, ctrl.Request{NamespacedName: types.NamespacedName{Namespace: "default", Name: "test"}}) - assert.NoError(t, err) - assert.Equal(t, defaultRequeuePeriod, result.RequeueAfter) - - namespacedName := types.NamespacedName{Namespace: "default", Name: "test"} - actualAlertCRD := &coralogixv1alpha1.Alert{} - err = r.Client.Get(ctx, namespacedName, actualAlertCRD) - assert.NoError(t, err) - - err = r.Client.Delete(ctx, actualAlertCRD) - <-watcher.ResultChan() - r.Reconcile(ctx, ctrl.Request{NamespacedName: types.NamespacedName{Namespace: "default", Name: "test"}}) -} - -// Creates a mock webhooks client that contains a single webhook with id "id1". 
-func createSimpleWebhooksClient(mockCtrl *gomock.Controller) *mock_clientset.MockWebhooksClientInterface { - mockWebhooksClient := mock_clientset.NewMockWebhooksClientInterface(mockCtrl) - webhooks := []map[string]interface{}{{"id": 1}} - bytes, _ := json.Marshal(webhooks) - var nilErr error - mockWebhooksClient.EXPECT().GetWebhooks(gomock.Any()).Return(string(bytes), nilErr).AnyTimes() - return mockWebhooksClient -} - -// Creates a mock alerts client that returns the given alert when creating an alert with name "name1" and id "id1". -func createSimpleMockAlertsClient(mockCtrl *gomock.Controller, alert *alerts.Alert) *mock_clientset.MockAlertsClientInterface { - mockAlertsClient := mock_clientset.NewMockAlertsClientInterface(mockCtrl) - - var alertExist bool - - mockAlertsClient.EXPECT(). - CreateAlert(gomock.Any(), gomock.Any()).DoAndReturn(func(_ context.Context, _ *alerts.CreateAlertRequest) (*alerts.CreateAlertResponse, error) { - alertExist = true - return &alerts.CreateAlertResponse{Alert: alert}, nil - }).AnyTimes() - - mockAlertsClient.EXPECT(). - GetAlert(gomock.Any(), gomock.Any()).DoAndReturn(func(_ context.Context, req *alerts.GetAlertByUniqueIdRequest) (*alerts.GetAlertByUniqueIdResponse, error) { - if alertExist { - return &alerts.GetAlertByUniqueIdResponse{Alert: alert}, nil - } - return nil, errors.NewNotFound(schema.GroupResource{}, "id1") - }).AnyTimes() - - mockAlertsClient.EXPECT(). - DeleteAlert(gomock.Any(), gomock.Any()).DoAndReturn(func(_ context.Context, req *alerts.DeleteAlertByUniqueIdRequest) (*alerts.DeleteAlertByUniqueIdResponse, error) { - if alertExist { - alertExist = false - return &alerts.DeleteAlertByUniqueIdResponse{}, nil - } - return nil, errors.NewNotFound(schema.GroupResource{}, "id1") - }).AnyTimes() - - return mockAlertsClient -} - -// Creates a mock alerts client that first time fails on creating alert, then returns the given alert when creating an alert with name "name1" and id "id1" . -func createMockAlertsClientWith5XXStatusError(mockCtrl *gomock.Controller, alert *alerts.Alert) *mock_clientset.MockAlertsClientInterface { - mockAlertsClient := mock_clientset.NewMockAlertsClientInterface(mockCtrl) - - var alertExist bool - var wasCalled bool - mockAlertsClient.EXPECT(). - CreateAlert(gomock.Any(), gomock.Any()).DoAndReturn(func(_ context.Context, _ *alerts.CreateAlertRequest) (*alerts.CreateAlertResponse, error) { - if !wasCalled { - wasCalled = true - return nil, errors.NewInternalError(fmt.Errorf("internal error")) - } - alertExist = true - return &alerts.CreateAlertResponse{Alert: alert}, nil - }).AnyTimes() - - mockAlertsClient.EXPECT(). - CreateAlert(gomock.Any(), gomock.Any()).DoAndReturn(func(_ context.Context, _ *alerts.CreateAlertRequest) (*alerts.CreateAlertResponse, error) { - alertExist = true - return &alerts.CreateAlertResponse{Alert: alert}, nil - }).AnyTimes() - - mockAlertsClient.EXPECT(). - GetAlert(gomock.Any(), gomock.Any()).DoAndReturn(func(_ context.Context, req *alerts.GetAlertByUniqueIdRequest) (*alerts.GetAlertByUniqueIdResponse, error) { - if alertExist { - return &alerts.GetAlertByUniqueIdResponse{Alert: alert}, nil - } - return nil, errors.NewNotFound(schema.GroupResource{}, "id1") - }).AnyTimes() - - mockAlertsClient.EXPECT(). 
- DeleteAlert(gomock.Any(), gomock.Any()).DoAndReturn(func(_ context.Context, req *alerts.DeleteAlertByUniqueIdRequest) (*alerts.DeleteAlertByUniqueIdResponse, error) { - if alertExist { - alertExist = false - return &alerts.DeleteAlertByUniqueIdResponse{}, nil - } - return nil, errors.NewNotFound(schema.GroupResource{}, "id1") - }).AnyTimes() - - return mockAlertsClient + assert.EqualValues(t, expected, &status) } diff --git a/go.mod b/go.mod index 275df5d2..8d942269 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/coralogix/coralogix-operator -go 1.20 +go 1.22 require ( github.com/golang/protobuf v1.5.3
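
For context beyond this diff: the table-driven tests above build the reconciler through `setupReconciler` with a mocked `ClientSetInterface`; the sketch below shows the equivalent wiring against a real controller-runtime manager. It is a minimal sketch and not part of this patch — the `clientset` import path, the `ClientSetInterface` name, and the `SetupWithManager(mgr) error` signature are assumptions inferred from the generated mocks and the standard kubebuilder scaffold, while `AlertReconciler` and its fields come from the code in this diff.

```go
// Illustrative wiring sketch only — not part of this patch. It mirrors the
// setupReconciler helper in alert_controller_test.go, but against a real
// manager instead of a mocked client set.
package setup

import (
	"k8s.io/apimachinery/pkg/runtime"
	utilruntime "k8s.io/apimachinery/pkg/util/runtime"
	ctrl "sigs.k8s.io/controller-runtime"

	coralogixv1alpha1 "github.com/coralogix/coralogix-operator/apis/coralogix/v1alpha1"
	"github.com/coralogix/coralogix-operator/controllers/alphacontrollers"
	// Assumed import path: the tests above only reference the generated
	// mocks (mock_clientset), so the real package location may differ.
	"github.com/coralogix/coralogix-operator/controllers/clientset"
)

// Run registers the refactored AlertReconciler with a controller-runtime
// manager and blocks until the manager stops.
func Run(cxClientSet clientset.ClientSetInterface) error {
	scheme := runtime.NewScheme()
	utilruntime.Must(coralogixv1alpha1.AddToScheme(scheme))

	mgr, err := ctrl.NewManager(ctrl.GetConfigOrDie(), ctrl.Options{Scheme: scheme})
	if err != nil {
		return err
	}

	r := &alphacontrollers.AlertReconciler{
		Client:             mgr.GetClient(),
		Scheme:             mgr.GetScheme(),
		CoralogixClientSet: cxClientSet,
	}
	// Assumes the standard kubebuilder SetupWithManager(mgr) error signature.
	if err := r.SetupWithManager(mgr); err != nil {
		return err
	}

	// With this patch, a successful Reconcile requeues after
	// defaultRequeuePeriod and a failed one after defaultErrRequeuePeriod,
	// so the remote Coralogix alert is re-checked periodically even when no
	// new Kubernetes events arrive.
	return mgr.Start(ctrl.SetupSignalHandler())
}
```

Dispatching from a single `Reconcile` into the `create`, `update`, and `delete` helpers keeps each path returning a plain error, with the requeue decision made once at the top instead of per branch as in the removed code.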