This repository has been archived by the owner on Oct 23, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 4
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
DCOS-60860 - Test host network support (#61)
- Loading branch information
Showing 9 changed files with 203 additions and 52 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,103 @@ | ||
package tests | ||
|
||
import ( | ||
"errors" | ||
"fmt" | ||
"github.com/mesosphere/kudo-spark-operator/tests/utils" | ||
log "github.com/sirupsen/logrus" | ||
v12 "k8s.io/api/core/v1" | ||
"testing" | ||
) | ||
|
||
/* | ||
Test that `hostNetwork` in SparkApplication propagates to driver and executor pods | ||
*/ | ||
func TestHostNetworkPropagation(t *testing.T) { | ||
spark := utils.SparkOperatorInstallation{} | ||
err := spark.InstallSparkOperator() | ||
defer spark.CleanUp() | ||
|
||
if err != nil { | ||
t.Fatal(err) | ||
} | ||
|
||
var testCases = []struct { | ||
driverHN bool | ||
executorHN bool | ||
}{ | ||
{false, false}, | ||
{true, false}, | ||
{false, true}, | ||
{true, true}, | ||
} | ||
|
||
for i, tc := range testCases { | ||
log.Infof("Running test case:\n- driver host network:\t\t%v\n- executor host network:\t%v", tc.driverHN, tc.executorHN) | ||
jobName := fmt.Sprintf("host-network-test-job-%d", i) | ||
job := utils.SparkJob{ | ||
Name: jobName, | ||
Template: "spark-mock-task-runner-job-host-network.yaml", | ||
Params: map[string]interface{}{ | ||
"args": []string{"1", "600"}, | ||
"driverHostNetwork": tc.driverHN, | ||
"executorHostNetwork": tc.executorHN, | ||
}, | ||
} | ||
|
||
// Submit the job and wait for it to start | ||
err = spark.SubmitAndWaitForExecutors(&job) | ||
if err != nil { | ||
t.Fatal(err) | ||
} | ||
|
||
// Verify driver pod hostNetwork and dnsPolicy values | ||
driver, err := spark.DriverPod(job) | ||
if err != nil { | ||
t.Fatal(err) | ||
} | ||
err = verifyPodHostNetwork(driver, tc.driverHN) | ||
log.Infof("Verifying driver %s spec values", driver.Name) | ||
if err != nil { | ||
t.Fatal(err) | ||
} | ||
|
||
// Verify executor pods hostNetwork and dnsPolicy values | ||
executors, err := spark.ExecutorPods(job) | ||
if err != nil { | ||
t.Fatal(err) | ||
} | ||
for _, executor := range executors { | ||
log.Infof("Verifying executor %s spec values", executor.Name) | ||
err = verifyPodHostNetwork(&executor, tc.executorHN) | ||
if err != nil { | ||
t.Fatal(err) | ||
} | ||
} | ||
|
||
// Terminate the job while it's running | ||
spark.DeleteJob(job) | ||
} | ||
} | ||
|
||
func verifyPodHostNetwork(pod *v12.Pod, expectedHostNetwork bool) error { | ||
log.Infof("Pod spec.hostNetwork: %v", pod.Spec.HostNetwork) | ||
log.Infof("Pod spec.dnspolicy: %v", pod.Spec.DNSPolicy) | ||
|
||
// Check spec values | ||
if pod.Spec.HostNetwork != expectedHostNetwork { | ||
return errors.New(fmt.Sprintf("Unexpected hostNetwork value for pod %v: %s. Should be %v", pod.Spec.HostNetwork, pod.Name, expectedHostNetwork)) | ||
} else if expectedHostNetwork && pod.Spec.DNSPolicy != v12.DNSClusterFirstWithHostNet { | ||
return errors.New(fmt.Sprintf("Expected pod pod DNS policy to be \"dnsClusterFirstWithHostNet\", but it's %s", pod.Spec.DNSPolicy)) | ||
} | ||
|
||
// Check pod IP | ||
log.Infof("Pod status.podIP: %v", pod.Status.PodIP) | ||
log.Infof("Pod status.hostIP: %v", pod.Status.HostIP) | ||
if expectedHostNetwork && pod.Status.PodIP != pod.Status.HostIP { | ||
return errors.New(fmt.Sprintf("Pod %s IP doesn't match the host IP", pod.Name)) | ||
} else if !expectedHostNetwork && pod.Status.PodIP == pod.Status.HostIP { | ||
return errors.New(fmt.Sprintf("Pod %s IP matches the host IP", pod.Name)) | ||
} | ||
|
||
return nil | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
36 changes: 36 additions & 0 deletions
36
tests/templates/spark-mock-task-runner-job-host-network.yaml
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,36 @@ | ||
# SparkApplication template used by TestHostNetworkPropagation.
# Rendered with Go text/template; Params supplies "args",
# "driverHostNetwork" and "executorHostNetwork".
apiVersion: "sparkoperator.k8s.io/v1beta2"
kind: SparkApplication
metadata:
  name: {{ .Name }}
  namespace: {{ .Namespace }}
spec:
  type: Scala
  mode: cluster
  image: {{ .Image }}
  imagePullPolicy: Always
  mainClass: MockTaskRunner
  mainApplicationFile: "https://infinity-artifacts.s3.amazonaws.com/scale-tests/dcos-spark-scala-tests-assembly-2.4.0-20190325.jar"
  arguments: {{ range $i, $arg := index .Params "args" }}
    - "{{ $arg }}"{{ end }}
  sparkConf:
    # Wait for the full executor set before scheduling tasks.
    "spark.scheduler.maxRegisteredResourcesWaitingTime": "2400s"
    "spark.scheduler.minRegisteredResourcesRatio": "1.0"
  sparkVersion: {{ .SparkVersion }}
  restartPolicy:
    type: Never
  driver:
    cores: 1
    memory: "512m"
    # Flag under test: propagated to the driver pod spec.
    hostNetwork: {{ index .Params "driverHostNetwork" }}
    labels:
      version: {{ .SparkVersion }}
      metrics-exposed: "true"
    serviceAccount: {{ .ServiceAccount }}
  executor:
    cores: 1
    instances: {{ .ExecutorsCount }}
    memory: "512m"
    # Flag under test: propagated to each executor pod spec.
    hostNetwork: {{ index .Params "executorHostNetwork" }}
    labels:
      version: {{ .SparkVersion }}
      metrics-exposed: "true"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters