Commit
feat: use OS specific config dir for simulator config, minor fixes
feat: use XDG_CONFIG_HOME for linux
style: improve readability
feat: ensure bucket name is configured
fix: ensure defer is always executed
style: specify which variable we are looking for for aws region
fix: set default log level to Info
docs: remove for loop when building AMIs
fix: use multi-arch image digests
style: use same var name for amiBuilder in container/simulator cmd
fix: remove unnecessary error wrapping
fix: specify ansible config path explicitly with ANSIBLE_CONFIG
refactor: use MkdirAll instead of Stat+Mkdir
feat: add config print-dir flag for convenience

Signed-off-by: ludo <[email protected]>
spiarh committed Jan 10, 2024
1 parent 0fa5264 commit 0ac7628
Showing 16 changed files with 214 additions and 122 deletions.
18 changes: 9 additions & 9 deletions Dockerfile.dev
@@ -1,8 +1,8 @@
ARG GOLANG_IMAGE=golang:1.21.5-alpine3.19@sha256:55f716237933c85cee01748700755b4ac8736fb1ca974c9aed051691b68d6dc2
ARG GOLANGCI_LINT_IMAGE=golangci/golangci-lint:latest@sha256:fb70c9b2e6d0763141f057abcafde7f88d5e4bb3b5882d6b14bc79382f04481c
ARG PACKER_IMAGE=hashicorp/packer:1.10@sha256:a10638519af09f5ecad52b6eb4eab489377e4e89f30ea46832f1f401a234d783
ARG TERRAFORM_IMAGE=hashicorp/terraform:1.6@sha256:d593c353357a3db5a795c2ba0b998580cf12bad9125807bd877092c2e813279b
ARG UBUNTU_IMAGE=ubuntu:mantic@sha256:8d093e0651575a6437cc4a3d561f892a345d263aeac6156ef378fe6a4ccabd4c
ARG GOLANG_IMAGE=golang:1.21.5-alpine3.19@sha256:4db4aac30880b978cae5445dd4a706215249ad4f43d28bd7cdf7906e9be8dd6b
ARG GOLANGCI_LINT_IMAGE=golangci/golangci-lint:latest@sha256:e699df940be1810b08ba6ec050bfc34cc1931027283b5a7f607fb6a67b503876
ARG PACKER_IMAGE=hashicorp/packer:1.10@sha256:1deccbc7bca80cccfc50218e269f87db33476fda79de814372db608715d000c0
ARG TERRAFORM_IMAGE=hashicorp/terraform:1.6@sha256:9a42ea97ea25b363f4c65be25b9ca52b1e511ea5bf7d56050a506ad2daa7af9d
ARG UBUNTU_IMAGE=ubuntu:mantic@sha256:cbc171ba52575fec0601f01abf6fdec67f8ed227658cacbc10d778ac3b218307

FROM ${GOLANGCI_LINT_IMAGE}

@@ -27,15 +27,15 @@ FROM ${PACKER_IMAGE} as PACKER
FROM ${TERRAFORM_IMAGE} as TERRAFORM
FROM ${UBUNTU_IMAGE}

WORKDIR simulator
WORKDIR /simulator

COPY --from=PACKER /bin/packer /usr/local/bin/packer
COPY --from=TERRAFORM /bin/terraform /usr/local/bin/terraform

RUN apt update && \
apt install -y ca-certificates openssh-client ansible-core && \
rm -rf /var/lib/apt/lists/* && \
ansible-galaxy collection install kubernetes.core
apt install -y ca-certificates openssh-client ansible-core && \
rm -rf /var/lib/apt/lists/* && \
ansible-galaxy collection install kubernetes.core

COPY --from=BUILDER /simulator /usr/local/bin/simulator

13 changes: 5 additions & 8 deletions cmd/simulator/main.go
@@ -2,7 +2,6 @@ package main

import (
"context"
"errors"
"log/slog"
"os"
"path/filepath"
@@ -114,7 +113,7 @@ func main() {
}

amiManager := aws.EC2{}
amiCreator := tools.PackerContainer{
amiBuilder := tools.PackerContainer{
Client: dockerClient,
Config: dockerConfig,
}
@@ -141,7 +140,7 @@ func main() {
cli.WithContainerPullCmd(conf, dockerClient),
),
cli.WithAMICmd(
cli.WithAmiBuildCmd(amiCreator),
cli.WithAmiBuildCmd(amiBuilder),
cli.WithAMIListCmd(amiManager),
cli.WithAMIDeleteCmd(amiManager),
),
@@ -177,11 +176,9 @@ func main() {

func mkDirsIfNotExist(dirs ...string) {
for _, dir := range dirs {
if _, err := os.Stat(dir); errors.Is(err, os.ErrNotExist) {
if err := os.Mkdir(dir, ownerReadWriteExecute); err != nil {
slog.Error("failed to bundle directory", "dir", dir, "error", err)
os.Exit(1)
}
if err := os.MkdirAll(dir, ownerReadWriteExecute); err != nil {
slog.Error("failed to bundle directory", "dir", dir, "error", err)
os.Exit(1)
}
}
}
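The `mkDirsIfNotExist` refactor above replaces the `Stat`-then-`Mkdir` sequence with a single `os.MkdirAll` call, which also creates missing parents and treats an already-existing directory as success. A minimal standalone sketch of the behaviour (the path and the `0o700` permission value are illustrative assumptions):

```go
package main

import (
	"fmt"
	"os"
)

const ownerReadWriteExecute = 0o700 // assumed value of the constant used in main.go

func main() {
	dir := "/tmp/simulator-demo/admin" // illustrative path

	// MkdirAll creates every missing parent directory and returns nil
	// if dir already exists, so no os.Stat pre-check is required.
	if err := os.MkdirAll(dir, ownerReadWriteExecute); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	// A second call is a no-op rather than an error; plain os.Mkdir
	// would fail here because the directory now exists.
	fmt.Println(os.MkdirAll(dir, ownerReadWriteExecute)) // <nil>
}
```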
4 changes: 2 additions & 2 deletions core/aws/buckets.go
@@ -20,7 +20,7 @@ type BucketManager interface {
func NewS3Client(ctx context.Context) (*S3Client, error) {
cfg, err := config.LoadDefaultConfig(ctx)
if err != nil {
return nil, errors.Join(errors.New("failed to load default config"), err)
return nil, fmt.Errorf("failed to load default config: %w", err)
}

return &S3Client{
@@ -35,7 +35,7 @@ type S3Client struct {
func (c S3Client) Create(ctx context.Context, name string) error {
region, ok := os.LookupEnv("AWS_REGION")
if !ok {
return errors.New("failed to create bucket, aws region not set")
return errors.New("failed to create bucket, AWS_REGION not set")
}

var bucketAlreadyOwnedByYou *types.BucketAlreadyOwnedByYou
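The error-wrapping change above (and the matching ones in `terraform.go` below) swaps `errors.Join(errors.New(...), err)` for `fmt.Errorf` with `%w`. Both keep the cause reachable via `errors.Is`/`errors.As`; the `%w` form simply avoids allocating a second error and renders as a single-line message. A standalone comparison:

```go
package main

import (
	"errors"
	"fmt"
	"io/fs"
)

func main() {
	cause := fs.ErrNotExist // stand-in for an SDK error

	joined := errors.Join(errors.New("failed to load default config"), cause)
	wrapped := fmt.Errorf("failed to load default config: %w", cause)

	// The cause stays matchable either way.
	fmt.Println(errors.Is(joined, fs.ErrNotExist))  // true
	fmt.Println(errors.Is(wrapped, fs.ErrNotExist)) // true

	// errors.Join renders the operands on separate lines,
	// while %w produces the conventional "context: cause" form.
	fmt.Printf("%q\n", joined.Error())  // "failed to load default config\nfile does not exist"
	fmt.Printf("%q\n", wrapped.Error()) // "failed to load default config: file does not exist"
}
```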
23 changes: 10 additions & 13 deletions core/aws/env.go
@@ -5,24 +5,21 @@ import (
"os"
)

var (
//nolint: gochecknoglobals
// envKeys is a list of environment variables that are used by the AWS SDK.
envKeys = []string{
"AWS_PROFILE",
"AWS_REGION",
"AWS_ACCESS_KEY_ID",
"AWS_SECRET_ACCESS_KEY",
"AWS_SESSION_TOKEN",
}
)
// envKeys is a list of environment variables that are used by the AWS SDK.
var envKeys = []string{ //nolint: gochecknoglobals
"AWS_PROFILE",
"AWS_REGION",
"AWS_ACCESS_KEY_ID",
"AWS_SECRET_ACCESS_KEY",
"AWS_SESSION_TOKEN",
}

func EnvVars() []string {
env := make([]string, 0)

for _, key := range envKeys {
value, ok := os.LookupEnv(key)
if ok && len(value) > 0 {
value := os.Getenv(key)
if len(value) > 0 {
env = append(env, fmt.Sprintf("%s=%s", key, value))
}
}
6 changes: 6 additions & 0 deletions core/tools/ansible.go
@@ -11,6 +11,7 @@ import (

const (
AnsiblePlaybookExecutable Executable = "ansible-playbook"
AnsibleConfigPath string = "/simulator/config/admin/ansible.cfg"
)

type ScenarioManager interface {
@@ -61,6 +62,11 @@ func ansiblePlaybookCommand(workingDir, playbookDir, playbook string, extraVars
Executable: AnsiblePlaybookExecutable,
WorkingDir: workingDir,
Arguments: args,
// Ansible complains on Windows+WSL that the directory
// with the ansible configuration is world writable
// and hence ignore the configuration unless explicitly
// set using the ANSIBLE_CONFIG environment variable.
Env: []string{"ANSIBLE_CONFIG=" + AnsibleConfigPath},
}
}

5 changes: 3 additions & 2 deletions core/tools/runner.go
@@ -2,7 +2,6 @@ package tools

import (
"context"
"errors"
"fmt"
"io"
"log/slog"
@@ -15,6 +14,7 @@ type Executable string
type runner struct {
Executable Executable
WorkingDir string
Env []string
Arguments []string
}

@@ -26,10 +26,11 @@ func (c runner) Run(ctx context.Context, output io.Writer) error {
cmd.Dir = c.WorkingDir
cmd.Stdout = output
cmd.Stderr = output
cmd.Env = c.Env

err := cmd.Run()
if err != nil {
return errors.Join(errors.New("failed to run runner"), err)
return fmt.Errorf("failed to run runner: %w", err)
}

return nil
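Note that assigning a non-nil slice to `cmd.Env`, as the new `Env` field does, replaces the inherited environment entirely: the child process sees only the listed variables (here, just `ANSIBLE_CONFIG` from `ansible.go`). If a caller ever needs the parent environment as well, the merge has to be explicit. A sketch of both variants:

```go
package main

import (
	"os"
	"os/exec"
)

func main() {
	cmd := exec.Command("ansible-playbook", "--version")

	// Variant 1: exactly one variable, nothing inherited.
	cmd.Env = []string{"ANSIBLE_CONFIG=/simulator/config/admin/ansible.cfg"}

	// Variant 2: inherit everything, then pin ANSIBLE_CONFIG on top.
	cmd.Env = append(os.Environ(), "ANSIBLE_CONFIG=/simulator/config/admin/ansible.cfg")

	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	_ = cmd.Run() // error handling elided in this sketch
}
```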
13 changes: 6 additions & 7 deletions core/tools/terraform.go
@@ -2,7 +2,6 @@ package tools

import (
"context"
"errors"
"fmt"
"io"

@@ -31,13 +30,13 @@ func (t Terraform) Create(ctx context.Context, stateBucket string, stateKey stri
backend := backendConfig(stateBucket, stateKey)

if err := terraformInitCommand(t.WorkingDir, backend).Run(ctx, t.Output); err != nil {
return errors.Join(errors.New("failed to initialise terraform"), err)
return fmt.Errorf("failed to initialise terraform: %w", err)
}

vars := terraformVars(name)

if err := terraformCommand(t.WorkingDir, TerraformApply, vars).Run(ctx, t.Output); err != nil {
return errors.Join(errors.New("failed to apply terraform"), err)
return fmt.Errorf("failed to apply terraform: %w", err)
}

return nil
@@ -47,13 +46,13 @@ func (t Terraform) Destroy(ctx context.Context, stateBucket string, stateKey str
backend := backendConfig(stateBucket, stateKey)

if err := terraformInitCommand(t.WorkingDir, backend).Run(ctx, t.Output); err != nil {
return errors.Join(errors.New("failed to initialise terraform"), err)
return fmt.Errorf("failed to initialise terraform: %w", err)
}

vars := terraformVars(name)

if err := terraformCommand(t.WorkingDir, TerraformDestroy, vars).Run(ctx, t.Output); err != nil {
return errors.Join(errors.New("failed to apply terraform"), err)
return fmt.Errorf("failed to destroy terraform: %w", err)
}

return nil
@@ -128,7 +127,7 @@ func (p TerraformContainer) Create(ctx context.Context, stateBucket string, stat
}

if err := p.Client.Run(ctx, config); err != nil {
return errors.Join(errors.New("failed to create infra"), err)
return fmt.Errorf("failed to create infra: %w", err)
}

return nil
@@ -148,7 +147,7 @@ func (p TerraformContainer) Destroy(ctx context.Context, stateBucket string, sta
}

if err := p.Client.Run(ctx, config); err != nil {
return errors.Join(errors.New("failed to destroy infra"), err)
return fmt.Errorf("failed to destroy infra: %w", err)
}

return nil
30 changes: 20 additions & 10 deletions docs/cli.md
@@ -22,25 +22,35 @@ flowchart TD
scenario --> install
```

[//]: # (TODO document commands)
[//]: # "TODO document commands"

## Configuration

The Simulator CLI is configured using the `simulator config` command.

By default, this writes its configuration file to `$HOME/.simulator/config.yaml`. This can be changed to an
alternative directory by setting the environment variable `SIMULATOR_DIR` to define the directory to use.
By default, this writes its configuration file `config.yaml` to the following
directories according to the target operating system family:

| OS Family | Directory                                                                              |
| --------- | -------------------------------------------------------------------------------------- |
| Linux     | $XDG_CONFIG_HOME/simulator - defaults to /home/$USER/.config/simulator                  |
| Windows   | %LOCALAPPDATA%/simulator - defaults to C:\Users\$env:USERNAME\AppData\Local\simulator   |
| MacOS     | $HOME/Library/Preferences/io.controlplane.simulator                                     |

This can be changed by setting the environment variable `SIMULATOR_DIR` to the directory to use.
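A sketch of the resolution logic the table implies, applying the `SIMULATOR_DIR` override first (function names and structure are illustrative, not the actual implementation):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"runtime"
)

// configDir resolves the simulator configuration directory:
// SIMULATOR_DIR wins, otherwise an OS-family-specific default is used.
func configDir() (string, error) {
	if dir := os.Getenv("SIMULATOR_DIR"); dir != "" {
		return dir, nil
	}

	home, err := os.UserHomeDir()
	if err != nil {
		return "", err
	}

	switch runtime.GOOS {
	case "windows":
		return filepath.Join(os.Getenv("LOCALAPPDATA"), "simulator"), nil
	case "darwin":
		return filepath.Join(home, "Library", "Preferences", "io.controlplane.simulator"), nil
	default: // linux and other unixes
		xdg := os.Getenv("XDG_CONFIG_HOME")
		if xdg == "" {
			xdg = filepath.Join(home, ".config")
		}
		return filepath.Join(xdg, "simulator"), nil
	}
}

func main() {
	dir, err := configDir()
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(filepath.Join(dir, "config.yaml"))
}
```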

The following flags can be used to configure the Simulator CLI.

| Name | Description |
|----------|-----------------------------------------------------------------------------------------------------------------|
| name | Used as the name, or name prefix for the Terraform created resources. Defaults to simulator. |
| bucket | The name of the S3 bucket to store Terraform state. Can be an existing bucket that you own. MUST be configured. |
| dev | Used to set the Simulator CLI into developer mode when working on new scenarios. |
| rootless | Used when running rootless Docker, to allow local directories to be written to from the container. |
| Name | Description |
| --------- | --------------------------------------------------------------------------------------------------------------- |
| name | Used as the name, or name prefix for the Terraform created resources. Defaults to simulator. |
| bucket | The name of the S3 bucket to store Terraform state. Can be an existing bucket that you own. MUST be configured. |
| dev | Used to set the Simulator CLI into developer mode when working on new scenarios. |
| rootless | Used when running rootless Docker, to allow local directories to be written to from the container. |
| print-dir | Print the configuration directory.                                                                                |

[//]: # (TODO: document scenario development and link)
[//]: # "TODO: document scenario development and link"

A minimal configuration file will look like the following.

44 changes: 33 additions & 11 deletions docs/container-images.md
@@ -3,26 +3,48 @@
To build the two Simulator Container Images, run `make simulator-image`.

| Name | Description |
|-------------------------------|--------------------------------------------------------------------------------|
| ----------------------------- | ------------------------------------------------------------------------------ |
| controlplane/simulator:latest | The complete image, bundling the required tools, and all of the configuration. |
| controlplane/simulator:dev | The development image, bundling the required tools. |

The following tools are bundled into both images.

* Ansible
* Packer
* Terraform
* The Simulator controlplane CLI
- Ansible
- Packer
- Terraform
- The Simulator controlplane CLI

This allows users to execute the various commands without having to install the required tools locally and manage
compatible versions. The Simulator CLI will run the image and execute the specified command within it.

The following directories will be bind mounted into the container at runtime.

| Name | Description |
|-------------------------|------------------------------------------------------------------------------------------|
| $HOME/.aws | The user's AWS configuration directory for accessing AWS credentials. |
| $HOME/.simulator/admin | The directory where Simulator will write the admin ssh bundle and ansible configuration. |
| $HOME/.simulator/player | The directory where Simulator will write the player ssh bundle. |
**Linux:**

[//]: # (TODO: Use the same configuration directory from SIMULATOR_DIR for the configuration?)
| Name | Description |
| ---------------------------------- | ---------------------------------------------------------------------------------------- |
| $HOME/.aws                         | The user's AWS configuration directory for accessing AWS credentials.                    |
| $XDG_CONFIG_HOME/simulator/admin   | The directory where Simulator will write the admin ssh bundle and ansible configuration. |
| $XDG_CONFIG_HOME/simulator/player  | The directory where Simulator will write the player ssh bundle.                          |

`XDG_CONFIG_HOME` defaults to `/home/$USER/.config`.

**Windows:**

| Name | Description |
| ------------------------------- | ---------------------------------------------------------------------------------------- |
| %HOMEPATH%/.aws                 | The user's AWS configuration directory for accessing AWS credentials.                     |
| %LOCALAPPDATA%/simulator/admin | The directory where Simulator will write the admin ssh bundle and ansible configuration. |
| %LOCALAPPDATA%/simulator/player | The directory where Simulator will write the player ssh bundle. |

`LOCALAPPDATA` defaults to `C:\Users\$env:USERNAME\AppData\Local`.

**MacOS:**

| Name | Description |
| ---------------------------------------------------------- | ---------------------------------------------------------------------------------------- |
| $HOME/.aws                                                  | The user's AWS configuration directory for accessing AWS credentials.                     |
| $HOME/Library/Preferences/io.controlplane.simulator/admin | The directory where Simulator will write the admin ssh bundle and ansible configuration. |
| $HOME/Library/Preferences/io.controlplane.simulator/player | The directory where Simulator will write the player ssh bundle. |

[//]: # "TODO: Use the same configuration directory from SIMULATOR_DIR for the configuration?"
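For illustration, on Linux the table above translates into Docker bind specifications along these lines; the container-side admin path matches `AnsibleConfigPath` in `core/tools/ansible.go`, while the other container-side paths are assumptions:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	home, _ := os.UserHomeDir()

	cfg := os.Getenv("XDG_CONFIG_HOME")
	if cfg == "" {
		cfg = filepath.Join(home, ".config")
	}

	// host:container pairs in Docker's -v/Binds syntax.
	// /simulator/config/admin matches AnsibleConfigPath in ansible.go;
	// /root/.aws and the player container path are illustrative assumptions.
	binds := []string{
		filepath.Join(home, ".aws") + ":/root/.aws",
		filepath.Join(cfg, "simulator", "admin") + ":/simulator/config/admin",
		filepath.Join(cfg, "simulator", "player") + ":/simulator/config/player",
	}

	for _, b := range binds {
		fmt.Println(b)
	}
}
```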
30 changes: 25 additions & 5 deletions docs/player-guide.md
@@ -46,6 +46,21 @@ simulator config --bucket <name> --rootless

## Setup

Before setting up the prerequisites, ensure the AWS_REGION environment variable is set:

For Linux and MacOS:

```shell
export AWS_REGION=my-aws-region
```

For Windows:

```powershell
$env:AWS_REGION = "my-aws-region"
```


There are three prerequisite steps to perform to set up your local and AWS environment before you can launch the
infrastructure and play the scenarios.

@@ -58,9 +73,9 @@ Run the following commands to perform these steps.
```shell
simulator bucket create
simulator container pull
for i in bastion k8s; do
simulator ami build $i
done

simulator ami build bastion
simulator ami build k8s
```

## Launch
Expand All @@ -85,8 +100,13 @@ With the infrastructure provisioned and the scenario installed, it's time to pla

A Player bundle has been created that will allow you to SSH directly into the starting point for the selected scenario.

This will either be in `$HOME/.simulator/player` or `$SIMULATOR_DIR/player` depending on whether you used the defaults
or a custom directory for the configuration. From here, simply SSH to access the scenario.
The configuration directory can be found using the following command:

```shell
simulator config --print-dir
```

From this directory, simply SSH to access the scenario.

```shell
ssh -F simulator_config bastion