wip, making dcos spark secret command (apache#234)
[SPARK-601] Test RPC authentication, allow users to generate a secret client-side
Arthur Rand authored Dec 13, 2017
1 parent bd2cb26 commit 6a99371
Showing 5 changed files with 114 additions and 25 deletions.
23 changes: 23 additions & 0 deletions cli/dcos-spark/main.go
@@ -24,6 +24,7 @@ type SparkCommand struct {
submitDockerImage string
submitDcosSpace string
submitEnv map[string]string
secretPath string

statusSkipMessage bool

@@ -145,6 +146,24 @@ func (cmd *SparkCommand) runWebui(a *kingpin.Application, e *kingpin.ParseElement, c *kingpin.ParseContext) error {
return nil
}

func (cmd *SparkCommand) runGenerateSecret(a *kingpin.Application, e *kingpin.ParseElement, c *kingpin.ParseContext) error {
	// Generate a random, base64-encoded secret string client-side.
	secret, err := GetRandomStringSecret()
	if err != nil {
		return err
	}

	// Store it at the user-supplied path via the DC/OS Enterprise CLI,
	// i.e. `dcos security secrets create <path> -v <value>`.
	_, err = client.RunCLICommand(
		"security", "secrets", "create", cmd.secretPath, fmt.Sprintf("-v %s", secret))
	if err != nil {
		log.Fatalf("Unable to create secret, %s", err) // log.Fatalf exits, so the return below is unreachable
		return err
	}

	return err
}

func handleCommands(app *kingpin.Application) {
cmd := &SparkCommand{submitEnv: make(map[string]string)}
run := app.Command("run", "Submit a job to the Spark Mesos Dispatcher").Action(cmd.runSubmit)
@@ -181,6 +200,10 @@ func handleCommands(app *kingpin.Application) {
kill := app.Command("kill", "Aborts a submitted Spark job").Action(cmd.runKill)
kill.Arg("submission-id", "The ID of the Spark job").Required().StringVar(&cmd.submissionId)

secret := app.Command("secret", "Make a shared secret, used for RPC authentication").
Action(cmd.runGenerateSecret)
secret.Arg("secret_path", "path and name for the secret").Required().StringVar(&cmd.secretPath)

app.Command("webui", "Returns the Spark Web UI URL").Action(cmd.runWebui)
}

24 changes: 24 additions & 0 deletions cli/dcos-spark/secretGenerator.go
@@ -0,0 +1,24 @@
package main

import (
"crypto/rand"
"encoding/base64"
)

const KEYLENGTH = 128 // number of random bytes drawn; the encoded string is correspondingly longer

func generateRandomBytes(n int) ([]byte, error) {
// https://elithrar.github.io/article/generating-secure-random-numbers-crypto-rand/
b := make([]byte, n)
_, err := rand.Read(b)
if err != nil {
return nil, err
}
return b, nil
}

func GetRandomStringSecret() (string, error) {
b, err := generateRandomBytes(KEYLENGTH)
return base64.URLEncoding.EncodeToString(b), err
}
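
A quick sanity check of the recipe above (a standalone sketch, not part of the commit): 128 random bytes encode to a 172-character URL-safe base64 string, padding included.

```go
package main

import (
	"crypto/rand"
	"encoding/base64"
	"fmt"
)

func main() {
	// Same recipe as GetRandomStringSecret above: KEYLENGTH (128) bytes from
	// crypto/rand, then URL-safe base64 so the value has no shell-hostile characters.
	b := make([]byte, 128)
	if _, err := rand.Read(b); err != nil {
		panic(err) // the CLI surfaces this error instead of panicking
	}
	s := base64.URLEncoding.EncodeToString(b)
	fmt.Println(len(s)) // 172 == 4 * ceil(128/3), '=' padding included
}
```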

39 changes: 14 additions & 25 deletions cli/dcos-spark/submit_builder.go
@@ -3,14 +3,12 @@ package main
import (
"bufio"
"bytes"
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"github.com/mesosphere/dcos-commons/cli/client"
"github.com/mesosphere/dcos-commons/cli/config"
"gopkg.in/alecthomas/kingpin.v3-unstable"
"io/ioutil"
"log"
"net/url"
"os"
@@ -22,6 +20,10 @@ var keyWhitespaceValPattern = regexp.MustCompile("(.+)\\s+(.+)")
var backslashNewlinePattern = regexp.MustCompile("\\s*\\\\s*\\n\\s+")
var collapseSpacesPattern = regexp.MustCompile(`[\s\p{Zs}]{2,}`)

const SECRET_REFERENCE_TEMPLATE = "spark.mesos.%s.secret.names"
const SECRET_FILENAME_TEMPLATE = "spark.mesos.%s.secret.filenames"
const SECRET_ENVKEY_TEMPLATE = "spark.mesos.%s.secret.envkeys"

type sparkVal struct {
flagName string
propName string
@@ -241,8 +243,7 @@ func sparkSubmitHelp() string {
func prepareBase64Secret(secretPath string, isEncoded bool) string {
ss := strings.Split(secretPath, "/")
s := ss[len(ss) - 1] // The secret file without any slashes
// TODO document how secret formatting works w.r.t decoding
// secrets with __dcos_base64__ will be decoded by mesos or spark
// secrets with __dcos_base64__ will be decoded by Mesos
if strings.HasPrefix(s, "__dcos_base64__") || strings.HasSuffix(s, "base64") {
// if we have the .base64 suffix, keep the whole name; spark-env will decode it
return strings.TrimPrefix(s, "__dcos_base64__")
@@ -255,9 +256,11 @@ func prepareBase64Secret(secretPath string, isEncoded bool) string {
}
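
To make the naming rule concrete, here is a standalone sketch (not part of the commit) of the branch shown above; the other branch of prepareBase64Secret is elided by this hunk, so only the prefix/suffix handling is illustrated.

```go
package main

import (
	"fmt"
	"strings"
)

// Mirrors the visible branch of prepareBase64Secret: take the basename of the
// secret path, strip a __dcos_base64__ prefix (Mesos decodes those secrets
// itself), and keep a trailing .base64 marker so spark-env decodes the file.
func base64SecretFilename(secretPath string) string {
	parts := strings.Split(secretPath, "/")
	name := parts[len(parts)-1]
	if strings.HasPrefix(name, "__dcos_base64__") || strings.HasSuffix(name, "base64") {
		return strings.TrimPrefix(name, "__dcos_base64__")
	}
	return name // the elided branch handles plain (non-base64) secret names
}

func main() {
	fmt.Println(base64SecretFilename("/spark/__dcos_base64__tgt")) // "tgt"
	fmt.Println(base64SecretFilename("/spark/tgt.base64"))         // "tgt.base64"
}
```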

func addArgsForFileBasedSecret(args *sparkArgs, secretPath, property string) {
args.properties["spark.mesos.driver.secret.names"] = secretPath
secretRefProp := fmt.Sprintf(SECRET_REFERENCE_TEMPLATE, "driver")
secretFileProp := fmt.Sprintf(SECRET_FILENAME_TEMPLATE, "driver")
appendToProperty(secretRefProp, secretPath, args)
appendToProperty(secretFileProp, prepareBase64Secret(secretPath, true), args)
args.properties[property] = prepareBase64Secret(secretPath, false)
args.properties["spark.mesos.driver.secret.filenames"] = prepareBase64Secret(secretPath, true)
}
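
appendToProperty itself is outside this diff. The standalone sketch below assumes comma-append semantics (the spark.mesos.*.secret.* properties take comma-separated lists) and uses a hypothetical path /sparkAuthSecret; it shows why the switch away from direct map assignment matters: a second secret, e.g. a TLS keystore, appends instead of overwriting.

```go
package main

import "fmt"

type sparkArgs struct {
	properties map[string]string
}

// Assumed semantics of appendToProperty (not shown in this diff): start the
// property if absent, otherwise append with a comma separator.
func appendToProperty(prop, value string, args *sparkArgs) {
	if existing, ok := args.properties[prop]; ok {
		args.properties[prop] = existing + "," + value
	} else {
		args.properties[prop] = value
	}
}

func main() {
	args := &sparkArgs{properties: map[string]string{}}
	appendToProperty("spark.mesos.driver.secret.names", "/sparkAuthSecret", args)
	appendToProperty("spark.mesos.driver.secret.filenames", "sparkAuthSecret", args)
	// A second secret (e.g. a TLS keystore) appends rather than overwrites:
	appendToProperty("spark.mesos.driver.secret.names", "/sparkTLSKeystore", args)
	fmt.Println(args.properties["spark.mesos.driver.secret.names"])
	// Output: /sparkAuthSecret,/sparkTLSKeystore
}
```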

func setupKerberosAuthArgs(args *sparkArgs) error {
@@ -271,9 +274,9 @@
return nil
}
if args.tgtSecretValue != "" { // using secret by value
args.properties["spark.mesos.driver.secret.values"] = args.tgtSecretValue
appendToProperty("spark.mesos.driver.secret.values", args.tgtSecretValue, args)
args.properties["spark.mesos.driverEnv.KRB5CCNAME"] = "tgt"
args.properties["spark.mesos.driver.secret.filenames"] = "tgt.base64"
appendToProperty(fmt.Sprintf(SECRET_FILENAME_TEMPLATE, "driver"), "tgt.base64", args)
return nil
}
return errors.New(fmt.Sprintf("Unable to add Kerberos args, got args %s", args))
@@ -306,9 +309,9 @@ func setupTLSArgs(args *sparkArgs) {

taskTypes :=[]string{"driver", "executor"}
for _, taskType := range taskTypes {
appendToProperty(fmt.Sprintf("spark.mesos.%s.secret.names", taskType), joinedPaths, args)
appendToProperty(fmt.Sprintf("spark.mesos.%s.secret.filenames", taskType), joinedFilenames, args)
appendToPropertyIfSet(fmt.Sprintf("spark.mesos.%s.secret.envkeys", taskType), joinedEnvkeys, args)
appendToProperty(fmt.Sprintf(SECRET_REFERENCE_TEMPLATE, taskType), joinedPaths, args)
appendToProperty(fmt.Sprintf(SECRET_FILENAME_TEMPLATE, taskType), joinedFilenames, args)
appendToPropertyIfSet(fmt.Sprintf(SECRET_ENVKEY_TEMPLATE, taskType), joinedEnvkeys, args)
}

// Passwords
@@ -510,20 +513,6 @@ func appendToPropertyIfSet(propValue, toAppend string, args *sparkArgs) {
}
}

func getBase64Content(path string) string {
log.Printf("Opening file %s", path)
data, err := ioutil.ReadFile(path)
if err != nil {
log.Fatal(err)
}

var encodebuf bytes.Buffer
encoder := base64.NewEncoder(base64.StdEncoding, &encodebuf)
encoder.Write(data)
encoder.Close() // must be called before returning string to ensure flush
return encodebuf.String()
}

func buildSubmitJson(cmd *SparkCommand) (string, error) {
// first, import any values in the provided properties file (space separated "key val")
// then map applicable envvars
27 changes: 27 additions & 0 deletions docs/security.md
@@ -86,7 +86,34 @@ See the [Secrets Documentation about spaces][13] for more details about spaces.
the keystore and truststore secrets will also show up as environment-based secrets,
due to the way secrets are implemented. You can ignore these extra environment variables.

# Spark SASL (RPC endpoint authentication)
Spark uses the Simple Authentication and Security Layer (SASL) to authenticate executors with the driver and to encrypt messages sent between components. This relies on a shared secret known to every component that is expected to communicate. A secret can be generated with the DC/OS Spark CLI:
```bash
dcos spark secret <secret_path>
# for example
dcos spark secret /sparkAuthSecret
```
This generates a random secret and uploads it to the [DC/OS secrets store][14] at the designated path. To use this secret for RPC authentication, add the following configurations to your CLI command:
```bash
dcos spark run --submit-args="\
...
--conf spark.mesos.containerizer=mesos \ # Mesos UCR is required for file-based secrets
--conf spark.authenticate=true \ # tell Spark to require authentication
--conf spark.authenticate.enableSaslEncryption=true \ # tell Spark to encrypt messages with SASL
--conf spark.authenticate.secret=sparkauthsecret.secret \ # file-based secret name for the driver; you may change the name
--conf spark.executorEnv._SPARK_AUTH_SECRET=sparkauthsecret.secret \ # file-based secret name for the executors
--conf spark.mesos.driver.secret.names=<secret_path> \ # secret path generated in the previous step, for the driver
--conf spark.mesos.driver.secret.filenames=sparkauthsecret.secret \ # tell Mesos to put the secret in this file in the driver's sandbox
--conf spark.mesos.executor.secret.names=<secret_path> \ # secret path generated in the previous step, for the executors
--conf spark.mesos.executor.secret.filenames=sparkauthsecret.secret \ # tell Mesos to put the secret in this file in the executors' sandboxes
...
"

```

[11]: https://docs.mesosphere.com/1.9/overview/architecture/components/
[12]: http://docs.oracle.com/javase/8/docs/technotes/tools/unix/keytool.html
[13]: https://docs.mesosphere.com/1.10/security/#spaces
[14]: https://docs.mesosphere.com/latest/security/secrets/
26 changes: 26 additions & 0 deletions tests/test_spark.py
@@ -14,6 +14,7 @@
import shakedown

import sdk_utils
import sdk_cmd

from tests import s3
from tests import utils
@@ -51,6 +52,31 @@ def test_jar(app_name="/spark"):
args=["--class", 'com.typesafe.spark.test.mesos.framework.runners.SparkJobRunner'])


@pytest.mark.sanity
def test_rpc_auth():
secret_name = "sparkauth"

rc, stdout, stderr = sdk_cmd.run_raw_cli("spark secret /{}".format(secret_name))
assert rc == 0, "Failed to generate Spark auth secret, stderr {err} stdout {out}".format(err=stderr, out=stdout)

args = ["--conf", "spark.mesos.containerizer=mesos",
"--conf", "spark.authenticate=true",
"--conf", "spark.authenticate.secret=sparkauthsecret.secret",
"--conf", "spark.authenticate.enableSaslEncryption=true",
"--conf", "spark.executorEnv._SPARK_AUTH_SECRET=sparkauthsecret.secret",
"--conf", "spark.mesos.driver.secret.names={}".format(secret_name),
"--conf", "spark.mesos.driver.secret.filenames=sparkauthsecret.secret",
"--conf", "spark.mesos.executor.secret.names={}".format(secret_name),
"--conf", "spark.mesos.executor.secret.filenames=sparkauthsecret.secret",
"--class", "org.apache.spark.examples.SparkPi"]

utils.run_tests(app_url=utils.SPARK_EXAMPLES,
app_args="100",
expected_output="Pi is roughly 3",
app_name="/spark",
args=args)


@pytest.mark.sanity
def test_sparkPi():
utils.run_tests(app_url=utils.SPARK_EXAMPLES,
