Add support for setting spark job namespaces to all namespaces
Signed-off-by: Yi Chen <[email protected]>
ChenYi015 committed Aug 14, 2024
1 parent e2693f1 commit 9a56afa
Showing 14 changed files with 123 additions and 70 deletions.
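
In practice, the new behavior is enabled by listing an empty string in `spark.jobNamespaces`. A minimal, illustrative values snippet (the keys mirror the chart values touched in this commit; exact defaults may differ):

# Illustrative values.yaml sketch, not part of this commit.
spark:
  rbac:
    create: true
  serviceAccount:
    create: true
  jobNamespaces:
  - ""        # an empty entry switches the operator to watch all namespaces
  - default   # non-empty entries still get their own RBAC and service account

With such values, the controller and webhook deployments render a --namespaces="" argument and the webhook configurations omit their namespaceSelector, as the updated tests below verify.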
2 changes: 1 addition & 1 deletion charts/spark-operator-chart/templates/spark/rbac.yaml
@@ -16,7 +16,7 @@ limitations under the License.

{{- if .Values.spark.rbac.create -}}
{{- range $jobNamespace := .Values.spark.jobNamespaces | default list }}
{{- if $jobNamespace }}
{{- if ne $jobNamespace "" }}

---
apiVersion: rbac.authorization.k8s.io/v1
@@ -15,16 +15,19 @@ limitations under the License.
*/}}

{{- if .Values.spark.serviceAccount.create }}
{{- range $sparkJobNamespace := .Values.spark.jobNamespaces | default list }}
{{- range $jobNamespace := .Values.spark.jobNamespaces | default list }}
{{- if ne $jobNamespace "" }}

---
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ include "spark-operator.spark.serviceAccountName" $ }}
namespace: {{ $sparkJobNamespace }}
namespace: {{ $jobNamespace }}
labels: {{ include "spark-operator.labels" $ | nindent 4 }}
{{- with $.Values.spark.serviceAccount.annotations }}
annotations: {{ toYaml . | nindent 4 }}
{{- end }}
{{- end }}
{{- end }}
{{- end }}
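
To illustrate the `ne $jobNamespace ""` guard above: with `jobNamespaces: ["", "ns1"]`, the empty entry produces no document of its own, so only one ServiceAccount is rendered, roughly as follows (a sketch assuming the `spark-operator-spark` name used in the chart tests; labels and annotations come from the chart helpers and are omitted here):

apiVersion: v1
kind: ServiceAccount
metadata:
  name: spark-operator-spark   # from the spark-operator.spark.serviceAccountName helper
  namespace: ns1               # one ServiceAccount per non-empty job namespace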
@@ -34,6 +34,7 @@ webhooks:
failurePolicy: {{ . }}
{{- end }}
{{- if .Values.spark.jobNamespaces }}
{{- if not (has "" .Values.spark.jobNamespaces) }}
namespaceSelector:
matchExpressions:
- key: kubernetes.io/metadata.name
@@ -43,6 +44,7 @@
- {{ . }}
{{- end }}
{{- end }}
{{- end }}
objectSelector:
matchLabels:
sparkoperator.k8s.io/launched-by-spark-operator: "true"
@@ -66,7 +68,7 @@
{{- with .Values.webhook.failurePolicy }}
failurePolicy: {{ . }}
{{- end }}
{{- if .Values.spark.jobNamespaces }}
{{- if not (has "" .Values.spark.jobNamespaces) }}
namespaceSelector:
matchExpressions:
- key: kubernetes.io/metadata.name
@@ -97,6 +99,7 @@ webhooks:
failurePolicy: {{ . }}
{{- end }}
{{- if .Values.spark.jobNamespaces }}
{{- if not (has "" .Values.spark.jobNamespaces) }}
namespaceSelector:
matchExpressions:
- key: kubernetes.io/metadata.name
@@ -106,6 +109,7 @@
- {{ . }}
{{- end }}
{{- end }}
{{- end }}
rules:
- apiGroups: ["sparkoperator.k8s.io"]
apiVersions: ["v1beta2"]
@@ -34,6 +34,7 @@ webhooks:
failurePolicy: {{ . }}
{{- end }}
{{- if .Values.spark.jobNamespaces }}
{{- if not (has "" .Values.spark.jobNamespaces) }}
namespaceSelector:
matchExpressions:
- key: kubernetes.io/metadata.name
@@ -43,6 +44,7 @@
- {{ . }}
{{- end }}
{{- end }}
{{- end }}
rules:
- apiGroups: ["sparkoperator.k8s.io"]
apiVersions: ["v1beta2"]
@@ -64,6 +66,7 @@
failurePolicy: {{ . }}
{{- end }}
{{- if .Values.spark.jobNamespaces }}
{{- if not (has "" .Values.spark.jobNamespaces) }}
namespaceSelector:
matchExpressions:
- key: kubernetes.io/metadata.name
@@ -73,6 +76,7 @@
- {{ . }}
{{- end }}
{{- end }}
{{- end }}
rules:
- apiGroups: ["sparkoperator.k8s.io"]
apiVersions: ["v1beta2"]
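
A note on the webhook configuration templates above: when `spark.jobNamespaces` contains `""`, the `has` guard skips the namespaceSelector entirely, so the webhooks match objects in every namespace. Otherwise the selector renders along these lines (a sketch; only the key and values lines are visible in the hunks above, and the `In` operator is assumed):

namespaceSelector:
  matchExpressions:
  - key: kubernetes.io/metadata.name
    operator: In            # assumed; not visible in this diff
    values:
    - ns1
    - ns2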
18 changes: 15 additions & 3 deletions charts/spark-operator-chart/tests/controller/deployment_test.yaml
@@ -110,14 +110,26 @@ tests:

- it: Should contain `--namespaces` arg if `spark.jobNamespaces` is set
set:
spark.jobNamespaces:
- ns1
- ns2
spark:
jobNamespaces:
- ns1
- ns2
asserts:
- contains:
path: spec.template.spec.containers[?(@.name=="spark-operator-controller")].args
content: --namespaces=ns1,ns2

- it: Should set namespaces to all namespaces (`""`) if `spark.jobNamespaces` contains empty string
set:
spark:
jobNamespaces:
- ""
- default
asserts:
- contains:
path: spec.template.spec.containers[?(@.name=="spark-operator-controller")].args
content: --namespaces=""

- it: Should contain `--controller-threads` arg if `controller.workers` is set
set:
controller:
40 changes: 24 additions & 16 deletions charts/spark-operator-chart/tests/spark/rbac_test.yaml
@@ -70,11 +70,13 @@ tests:
name: spark-operator-spark
namespace: ""

- it: Should create multiple spark roles if `spark.jobNamespaces` is set with multiple values
- it: Should create spark role for every non-empty spark job namespace if `spark.jobNamespaces` is set with multiple values
set:
spark.jobNamespaces:
- ns1
- ns2
spark:
jobNamespaces:
- ""
- ns1
- ns2
documentIndex: 0
asserts:
- containsDocument:
@@ -83,11 +85,13 @@
name: spark-operator-spark
namespace: ns1

- it: Should create multiple spark role bindings if `spark.jobNamespaces` is set with multiple values
- it: Should create spark role binding for every non-empty spark job namespace if `spark.jobNamespaces` is set with multiple values
set:
spark.jobNamespaces:
- ns1
- ns2
spark:
jobNamespaces:
- ""
- ns1
- ns2
documentIndex: 1
asserts:
- containsDocument:
@@ -96,11 +100,13 @@
name: spark-operator-spark
namespace: ns1

- it: Should create multiple spark roles if `spark.jobNamespaces` is set with multiple values
- it: Should create spark role for every non-empty spark job namespace if `spark.jobNamespaces` is set with multiple values
set:
spark.jobNamespaces:
- ns1
- ns2
spark:
jobNamespaces:
- ""
- ns1
- ns2
documentIndex: 2
asserts:
- containsDocument:
@@ -109,11 +115,13 @@
name: spark-operator-spark
namespace: ns2

- it: Should create multiple spark role bindings if `spark.jobNamespaces` is set with multiple values
- it: Should create spark role binding for every non-empty spark job namespace if `spark.jobNamespaces` is set with multiple values
set:
spark.jobNamespaces:
- ns1
- ns2
spark:
jobNamespaces:
- ""
- ns1
- ns2
documentIndex: 3
asserts:
- containsDocument:
39 changes: 8 additions & 31 deletions charts/spark-operator-chart/tests/spark/serviceaccount_test.yaml
@@ -66,59 +66,36 @@ tests:
path: metadata.annotations.key2
value: value2

- it: Should create multiple service accounts if `spark.jobNamespaces` is set
- it: Should create service account for every non-empty spark job namespace if `spark.jobNamespaces` is set with multiple values
set:
spark:
serviceAccount:
name: spark
jobNamespaces:
- ""
- ns1
- ns2
- ns3
documentIndex: 0
asserts:
- hasDocuments:
count: 3
count: 2
- containsDocument:
apiVersion: v1
kind: ServiceAccount
name: spark
name: spark-operator-spark
namespace: ns1

- it: Should create multiple service accounts if `spark.jobNamespaces` is set
- it: Should create service account for every non-empty spark job namespace if `spark.jobNamespaces` is set with multiple values
set:
spark:
serviceAccount:
name: spark
jobNamespaces:
- ""
- ns1
- ns2
- ns3
documentIndex: 1
asserts:
- hasDocuments:
count: 3
count: 2
- containsDocument:
apiVersion: v1
kind: ServiceAccount
name: spark
name: spark-operator-spark
namespace: ns2

- it: Should create multiple service accounts if `spark.jobNamespaces` is set
set:
spark:
serviceAccount:
name: spark
jobNamespaces:
- ns1
- ns2
- ns3
documentIndex: 2
asserts:
- hasDocuments:
count: 3
- containsDocument:
apiVersion: v1
kind: ServiceAccount
name: spark
namespace: ns3
11 changes: 11 additions & 0 deletions charts/spark-operator-chart/tests/webhook/deployment_test.yaml
@@ -107,6 +107,17 @@ tests:
path: spec.template.spec.containers[?(@.name=="spark-operator-webhook")].args
content: --namespaces=ns1,ns2

- it: Should set namespaces to all namespaces (`""`) if `spark.jobNamespaces` contains empty string
set:
spark:
jobNamespaces:
- ""
- default
asserts:
- contains:
path: spec.template.spec.containers[?(@.name=="spark-operator-webhook")].args
content: --namespaces=""

- it: Should contain `--enable-metrics` arg if `prometheus.metrics.enable` is set to `true`
set:
prometheus:
@@ -49,7 +49,7 @@ tests:
path: webhooks[*].failurePolicy
value: Fail

- it: Should set namespaceSelector if sparkJobNamespaces is not empty
- it: Should set namespaceSelector if `spark.jobNamespaces` is set with non-empty strings
set:
spark:
jobNamespaces:
@@ -68,6 +68,19 @@
- ns2
- ns3

- it: Should not set namespaceSelector if `spark.jobNamespaces` contains empty string
set:
spark:
jobNamespaces:
- ""
- ns1
- ns2
- ns3
asserts:
- notExists:
path: webhooks[*].namespaceSelector

- it: Should use the specified timeoutSeconds
set:
webhook:
@@ -49,7 +49,7 @@ tests:
path: webhooks[*].failurePolicy
value: Fail

- it: Should set namespaceSelector if `spark.jobNamespaces` is not empty
- it: Should set namespaceSelector if `spark.jobNamespaces` is set with non-empty strings
set:
spark.jobNamespaces:
- ns1
@@ -67,6 +67,18 @@
- ns2
- ns3

- it: Should not set namespaceSelector if `spark.jobNamespaces` contains empty string
set:
spark:
jobNamespaces:
- ""
- ns1
- ns2
- ns3
asserts:
- notExists:
path: webhooks[*].namespaceSelector

- it: Should use the specified timeoutSeconds
set:
webhook:
6 changes: 2 additions & 4 deletions cmd/operator/controller/start.go
@@ -123,7 +123,7 @@ func NewStartCommand() *cobra.Command {
}

command.Flags().IntVar(&controllerThreads, "controller-threads", 10, "Number of worker threads used by the SparkApplication controller.")
command.Flags().StringSliceVar(&namespaces, "namespaces", []string{}, "The Kubernetes namespace to manage. Will manage custom resource objects of the managed CRD types for the whole cluster if unset.")
command.Flags().StringSliceVar(&namespaces, "namespaces", []string{}, "The Kubernetes namespace to manage. Will manage custom resource objects of the managed CRD types for the whole cluster if unset or contains empty string.")
command.Flags().DurationVar(&cacheSyncTimeout, "cache-sync-timeout", 30*time.Second, "Informer cache sync timeout.")

command.Flags().BoolVar(&enableBatchScheduler, "enable-batch-scheduler", false, "Enable batch schedulers.")
@@ -300,9 +300,7 @@ func newTLSOptions() []func(c *tls.Config) {
// newCacheOptions creates and returns a cache.Options instance configured with default namespaces and object caching settings.
func newCacheOptions() cache.Options {
defaultNamespaces := make(map[string]cache.Config)
if util.ContainsString(namespaces, cache.AllNamespaces) {
defaultNamespaces[cache.AllNamespaces] = cache.Config{}
} else {
if !util.ContainsString(namespaces, cache.AllNamespaces) {
for _, ns := range namespaces {
defaultNamespaces[ns] = cache.Config{}
}
6 changes: 2 additions & 4 deletions cmd/operator/webhook/start.go
@@ -130,7 +130,7 @@ func NewStartCommand() *cobra.Command {
}

command.Flags().IntVar(&controllerThreads, "controller-threads", 10, "Number of worker threads used by the SparkApplication controller.")
command.Flags().StringSliceVar(&namespaces, "namespaces", []string{"default"}, "The Kubernetes namespace to manage. Will manage custom resource objects of the managed CRD types for the whole cluster if unset.")
command.Flags().StringSliceVar(&namespaces, "namespaces", []string{}, "The Kubernetes namespace to manage. Will manage custom resource objects of the managed CRD types for the whole cluster if unset or contains empty string.")
command.Flags().StringVar(&labelSelectorFilter, "label-selector-filter", "", "A comma-separated list of key=value, or key labels to filter resources during watch and list based on the specified labels.")
command.Flags().DurationVar(&cacheSyncTimeout, "cache-sync-timeout", 30*time.Second, "Informer cache sync timeout.")

@@ -368,9 +368,7 @@ func newTLSOptions() []func(c *tls.Config) {
// newCacheOptions creates and returns a cache.Options instance configured with default namespaces and object caching settings.
func newCacheOptions() cache.Options {
defaultNamespaces := make(map[string]cache.Config)
if util.ContainsString(namespaces, cache.AllNamespaces) {
defaultNamespaces[cache.AllNamespaces] = cache.Config{}
} else {
if !util.ContainsString(namespaces, cache.AllNamespaces) {
for _, ns := range namespaces {
defaultNamespaces[ns] = cache.Config{}
}