From 0ac762836c0bdb223f1a3253aaa8a49ee96834d3 Mon Sep 17 00:00:00 2001 From: ludo Date: Tue, 2 Jan 2024 16:47:54 +0100 Subject: [PATCH] feat: use OS specific config dir for simulator config, minor fixes feat: use XDG_CONFIG_HOME for linux style: improve readability feat: ensure bucket name is configured fix: ensure defer is always executed style: specify which variable we are looking for for aws region fix: set default log level to Info docs: remove for loop when building AMIs fix: use multi-arch image digests style: use same var name for amiBuilder in container/simulator cmd fix: remove unnecessary error wrapping fix: specify ansible config path explicitly with ANSIBLE_CONFIG refactor: use MkdirAll instead of Stat+Mkdir feat: add config print-dir flag for conveniency Signed-off-by: ludo --- Dockerfile.dev | 18 +++++----- cmd/simulator/main.go | 13 +++----- core/aws/buckets.go | 4 +-- core/aws/env.go | 23 ++++++------- core/tools/ansible.go | 6 ++++ core/tools/runner.go | 5 +-- core/tools/terraform.go | 13 ++++---- docs/cli.md | 30 +++++++++++------ docs/container-images.md | 44 ++++++++++++++++++------- docs/player-guide.md | 30 ++++++++++++++--- internal/cli/bucket.go | 7 ++++ internal/cli/config.go | 65 ++++++++++++++++++++++--------------- internal/config/config.go | 63 +++++++++++++++++++++++------------ internal/docker/docker.go | 6 ++-- internal/logging/factory.go | 2 +- scenarios/scenarios.go | 7 ++-- 16 files changed, 214 insertions(+), 122 deletions(-) diff --git a/Dockerfile.dev b/Dockerfile.dev index b00ce920..5695c991 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,8 +1,8 @@ -ARG GOLANG_IMAGE=golang:1.21.5-alpine3.19@sha256:55f716237933c85cee01748700755b4ac8736fb1ca974c9aed051691b68d6dc2 -ARG GOLANGCI_LINT_IMAGE=golangci/golangci-lint:latest@sha256:fb70c9b2e6d0763141f057abcafde7f88d5e4bb3b5882d6b14bc79382f04481c -ARG PACKER_IMAGE=hashicorp/packer:1.10@sha256:a10638519af09f5ecad52b6eb4eab489377e4e89f30ea46832f1f401a234d783 -ARG 
TERRAFORM_IMAGE=hashicorp/terraform:1.6@sha256:d593c353357a3db5a795c2ba0b998580cf12bad9125807bd877092c2e813279b -ARG UBUNTU_IMAGE=ubuntu:mantic@sha256:8d093e0651575a6437cc4a3d561f892a345d263aeac6156ef378fe6a4ccabd4c +ARG GOLANG_IMAGE=golang:1.21.5-alpine3.19@sha256:4db4aac30880b978cae5445dd4a706215249ad4f43d28bd7cdf7906e9be8dd6b +ARG GOLANGCI_LINT_IMAGE=golangci/golangci-lint:latest@sha256:e699df940be1810b08ba6ec050bfc34cc1931027283b5a7f607fb6a67b503876 +ARG PACKER_IMAGE=hashicorp/packer:1.10@sha256:1deccbc7bca80cccfc50218e269f87db33476fda79de814372db608715d000c0 +ARG TERRAFORM_IMAGE=hashicorp/terraform:1.6@sha256:9a42ea97ea25b363f4c65be25b9ca52b1e511ea5bf7d56050a506ad2daa7af9d +ARG UBUNTU_IMAGE=ubuntu:mantic@sha256:cbc171ba52575fec0601f01abf6fdec67f8ed227658cacbc10d778ac3b218307 FROM ${GOLANGCI_LINT_IMAGE} @@ -27,15 +27,15 @@ FROM ${PACKER_IMAGE} as PACKER FROM ${TERRAFORM_IMAGE} as TERRAFORM FROM ${UBUNTU_IMAGE} -WORKDIR simulator +WORKDIR /simulator COPY --from=PACKER /bin/packer /usr/local/bin/packer COPY --from=TERRAFORM /bin/terraform /usr/local/bin/terraform RUN apt update && \ - apt install -y ca-certificates openssh-client ansible-core && \ - rm -rf /var/lib/apt/lists/* && \ - ansible-galaxy collection install kubernetes.core + apt install -y ca-certificates openssh-client ansible-core && \ + rm -rf /var/lib/apt/lists/* && \ + ansible-galaxy collection install kubernetes.core COPY --from=BUILDER /simulator /usr/local/bin/simulator diff --git a/cmd/simulator/main.go b/cmd/simulator/main.go index 40684aec..3019032d 100644 --- a/cmd/simulator/main.go +++ b/cmd/simulator/main.go @@ -2,7 +2,6 @@ package main import ( "context" - "errors" "log/slog" "os" "path/filepath" @@ -114,7 +113,7 @@ func main() { } amiManager := aws.EC2{} - amiCreator := tools.PackerContainer{ + amiBuilder := tools.PackerContainer{ Client: dockerClient, Config: dockerConfig, } @@ -141,7 +140,7 @@ func main() { cli.WithContainerPullCmd(conf, dockerClient), ), cli.WithAMICmd( - 
cli.WithAmiBuildCmd(amiCreator), + cli.WithAmiBuildCmd(amiBuilder), cli.WithAMIListCmd(amiManager), cli.WithAMIDeleteCmd(amiManager), ), @@ -177,11 +176,9 @@ func main() { func mkDirsIfNotExist(dirs ...string) { for _, dir := range dirs { - if _, err := os.Stat(dir); errors.Is(err, os.ErrNotExist) { - if err := os.Mkdir(dir, ownerReadWriteExecute); err != nil { - slog.Error("failed to bundle directory", "dir", dir, "error", err) - os.Exit(1) - } + if err := os.MkdirAll(dir, ownerReadWriteExecute); err != nil { + slog.Error("failed to bundle directory", "dir", dir, "error", err) + os.Exit(1) } } } diff --git a/core/aws/buckets.go b/core/aws/buckets.go index c0a5c0a4..427136a4 100644 --- a/core/aws/buckets.go +++ b/core/aws/buckets.go @@ -20,7 +20,7 @@ type BucketManager interface { func NewS3Client(ctx context.Context) (*S3Client, error) { cfg, err := config.LoadDefaultConfig(ctx) if err != nil { - return nil, errors.Join(errors.New("failed to load default config"), err) + return nil, fmt.Errorf("failed to load default config: %w", err) } return &S3Client{ @@ -35,7 +35,7 @@ type S3Client struct { func (c S3Client) Create(ctx context.Context, name string) error { region, ok := os.LookupEnv("AWS_REGION") if !ok { - return errors.New("failed to create bucket, aws region not set") + return errors.New("failed to create bucket, AWS_REGION not set") } var bucketAlreadyOwnedByYou *types.BucketAlreadyOwnedByYou diff --git a/core/aws/env.go b/core/aws/env.go index 84e041c9..321193a5 100644 --- a/core/aws/env.go +++ b/core/aws/env.go @@ -5,24 +5,21 @@ import ( "os" ) -var ( - //nolint: gochecknoglobals - // envKeys is a list of environment variables that are used by the AWS SDK. - envKeys = []string{ - "AWS_PROFILE", - "AWS_REGION", - "AWS_ACCESS_KEY_ID", - "AWS_SECRET_ACCESS_KEY", - "AWS_SESSION_TOKEN", - } -) +// envKeys is a list of environment variables that are used by the AWS SDK. 
+var envKeys = []string{ //nolint: gochecknoglobals + "AWS_PROFILE", + "AWS_REGION", + "AWS_ACCESS_KEY_ID", + "AWS_SECRET_ACCESS_KEY", + "AWS_SESSION_TOKEN", +} func EnvVars() []string { env := make([]string, 0) for _, key := range envKeys { - value, ok := os.LookupEnv(key) - if ok && len(value) > 0 { + value := os.Getenv(key) + if len(value) > 0 { env = append(env, fmt.Sprintf("%s=%s", key, value)) } } diff --git a/core/tools/ansible.go b/core/tools/ansible.go index 9622141a..50b7ad23 100644 --- a/core/tools/ansible.go +++ b/core/tools/ansible.go @@ -11,6 +11,7 @@ import ( const ( AnsiblePlaybookExecutable Executable = "ansible-playbook" + AnsibleConfigPath string = "/simulator/config/admin/ansible.cfg" ) type ScenarioManager interface { @@ -61,6 +62,11 @@ func ansiblePlaybookCommand(workingDir, playbookDir, playbook string, extraVars Executable: AnsiblePlaybookExecutable, WorkingDir: workingDir, Arguments: args, + // Ansible complains on Windows+WSL that the directory + // with the ansible configuration is world writable + // and hence ignore the configuration unless explicitly + // set using the ANSIBLE_CONFIG environment variable. 
+ Env: []string{"ANSIBLE_CONFIG=" + AnsibleConfigPath}, } } diff --git a/core/tools/runner.go b/core/tools/runner.go index 51b60d15..edc308d3 100644 --- a/core/tools/runner.go +++ b/core/tools/runner.go @@ -2,7 +2,6 @@ package tools import ( "context" - "errors" "fmt" "io" "log/slog" @@ -15,6 +14,7 @@ type Executable string type runner struct { Executable Executable WorkingDir string + Env []string Arguments []string } @@ -26,10 +26,11 @@ func (c runner) Run(ctx context.Context, output io.Writer) error { cmd.Dir = c.WorkingDir cmd.Stdout = output cmd.Stderr = output + cmd.Env = c.Env err := cmd.Run() if err != nil { - return errors.Join(errors.New("failed to run runner"), err) + return fmt.Errorf("failed to run runner: %w", err) } return nil diff --git a/core/tools/terraform.go b/core/tools/terraform.go index d2c5eb71..e04710e1 100644 --- a/core/tools/terraform.go +++ b/core/tools/terraform.go @@ -2,7 +2,6 @@ package tools import ( "context" - "errors" "fmt" "io" @@ -31,13 +30,13 @@ func (t Terraform) Create(ctx context.Context, stateBucket string, stateKey stri backend := backendConfig(stateBucket, stateKey) if err := terraformInitCommand(t.WorkingDir, backend).Run(ctx, t.Output); err != nil { - return errors.Join(errors.New("failed to initialise terraform"), err) + return fmt.Errorf("failed to initialise terraform: %w", err) } vars := terraformVars(name) if err := terraformCommand(t.WorkingDir, TerraformApply, vars).Run(ctx, t.Output); err != nil { - return errors.Join(errors.New("failed to apply terraform"), err) + return fmt.Errorf("failed to apply terraform: %w", err) } return nil @@ -47,13 +46,13 @@ func (t Terraform) Destroy(ctx context.Context, stateBucket string, stateKey str backend := backendConfig(stateBucket, stateKey) if err := terraformInitCommand(t.WorkingDir, backend).Run(ctx, t.Output); err != nil { - return errors.Join(errors.New("failed to initialise terraform"), err) + return fmt.Errorf("failed to initialise terraform: %w", err) } vars := 
terraformVars(name) if err := terraformCommand(t.WorkingDir, TerraformDestroy, vars).Run(ctx, t.Output); err != nil { - return errors.Join(errors.New("failed to apply terraform"), err) + return fmt.Errorf("failed to destroy terraform: %w", err) } return nil @@ -128,7 +127,7 @@ func (p TerraformContainer) Create(ctx context.Context, stateBucket string, stat } if err := p.Client.Run(ctx, config); err != nil { - return errors.Join(errors.New("failed to create infra"), err) + return fmt.Errorf("failed to create infra: %w", err) } return nil @@ -148,7 +147,7 @@ func (p TerraformContainer) Destroy(ctx context.Context, stateBucket string, sta } if err := p.Client.Run(ctx, config); err != nil { - return errors.Join(errors.New("failed to destroy infra"), err) + return fmt.Errorf("failed to destroy infra: %w", err) } return nil diff --git a/docs/cli.md b/docs/cli.md index 422f33c9..96a6b9b3 100644 --- a/docs/cli.md +++ b/docs/cli.md @@ -22,25 +22,35 @@ flowchart TD scenario --> install ``` -[//]: # (TODO document commands) +[//]: # "TODO document commands" ## Configuration The Simulator ClI is configured using the `simulator config` command. -By default, this writes its configuration file to `$HOME/.simulator/config.yaml`. This can be changed to an -alternative directory by setting the environment variable `SIMULATOR_DIR` to define the directory to use. 
+By default, this writes its configuration file `config.yaml` to the following
+directories according to the target operating system family:
+
+| OS Family | Directory |
+| --------- | ------------------------------------------------------------------------------------ |
+| Linux | $XDG_CONFIG_HOME/simulator - defaults to /home/$USER/.config/simulator |
+| Windows | %LOCALAPPDATA%/simulator - defaults to C:\Users\$env:USERNAME\AppData\Local\simulator |
+| MacOS | $HOME/Library/Preferences/io.controlplane.simulator |
+
+This can be changed to an alternative directory by setting the environment variable
+`SIMULATOR_DIR` to define the directory to use.
 
 The following flags can be used to configure the Simulator CLI.
 
-| Name | Description |
-|----------|-----------------------------------------------------------------------------------------------------------------|
-| name | Used as the name, or name prefix for the Terraform created resources. Defaults to simulator. |
-| bucket | The name of the S3 bucket to store Terraform state. Can be an existing bucket that you own. MUST be configured. |
-| dev | Used to set the Simulator CLI into developer mode when working on new scenarios. |
-| rootless | Used when running rootless Docker, to allow local directories to be written to from the container. |
+| Name | Description |
+| --------- | --------------------------------------------------------------------------------------------------------------- |
+| name | Used as the name, or name prefix for the Terraform created resources. Defaults to simulator. |
+| bucket | The name of the S3 bucket to store Terraform state. Can be an existing bucket that you own. MUST be configured. |
+| dev | Used to set the Simulator CLI into developer mode when working on new scenarios. |
+| rootless | Used when running rootless Docker, to allow local directories to be written to from the container.
| +| print-dir | Print configuration directory | -[//]: # (TODO: document scenario development and link) +[//]: # "TODO: document scenario development and link" A minimal configuration file will look like the following. diff --git a/docs/container-images.md b/docs/container-images.md index 477236a3..885e4a61 100644 --- a/docs/container-images.md +++ b/docs/container-images.md @@ -3,26 +3,48 @@ To build the Simulator Container Images run `make simulator-image` to build the two images. | Name | Description | -|-------------------------------|--------------------------------------------------------------------------------| +| ----------------------------- | ------------------------------------------------------------------------------ | | controlplane/simulator:latest | The complete image, bundling the required tools, and all of the configuration. | | controlplane/simulator:dev | The development image, bundling the required tools. | The following tools are bundled into both images. -* Ansible -* Packer -* Terraform -* The Simulator controlplane CLI +- Ansible +- Packer +- Terraform +- The Simulator controlplane CLI This allows users to execute the various commands without having to install the required tools locally and managing compatible versions. The Simulator CLI will run the image and execute the specified command within the image. The following directories will be bind mounted into the container at runtime. -| Name | Description | -|-------------------------|------------------------------------------------------------------------------------------| -| $HOME/.aws | The users AWS configuration directory for access AWS credentials. | -| $HOME/.simulator/admin | The directory where Simulator will write the admin ssh bundle and ansible configuration. | -| $HOME/.simulator/player | The directory where Simulator will write the player ssh bundle. | +**Linux:** -[//]: # (TODO: Use the same configuration directory from SIMULATOR_DIR for the configuration?) 
+| Name | Description |
+| ---------------------------------- | ---------------------------------------------------------------------------------------- |
+| $HOME/.aws | The user's AWS configuration directory for accessing AWS credentials. |
+| $XDG_CONFIG_HOME/simulator/admin | The directory where Simulator will write the admin ssh bundle and ansible configuration. |
+| $XDG_CONFIG_HOME/simulator/player | The directory where Simulator will write the player ssh bundle. |
+
+`XDG_CONFIG_HOME` defaults to `/home/$USER/.config`.
+
+**Windows:**
+
+| Name | Description |
+| ------------------------------- | ---------------------------------------------------------------------------------------- |
+| %HOMEPATH%/.aws | The user's AWS configuration directory for accessing AWS credentials. |
+| %LOCALAPPDATA%/simulator/admin | The directory where Simulator will write the admin ssh bundle and ansible configuration. |
+| %LOCALAPPDATA%/simulator/player | The directory where Simulator will write the player ssh bundle. |
+
+`LOCALAPPDATA` defaults to `C:\Users\$env:USERNAME\AppData\Local`.
+
+**MacOS:**
+
+| Name | Description |
+| ---------------------------------------------------------- | ---------------------------------------------------------------------------------------- |
+| $HOME/.aws | The user's AWS configuration directory for accessing AWS credentials. |
+| $HOME/Library/Preferences/io.controlplane.simulator/admin | The directory where Simulator will write the admin ssh bundle and ansible configuration. |
+| $HOME/Library/Preferences/io.controlplane.simulator/player | The directory where Simulator will write the player ssh bundle. |
+
+[//]: # "TODO: Use the same configuration directory from SIMULATOR_DIR for the configuration?"
diff --git a/docs/player-guide.md b/docs/player-guide.md
index d39e7c92..13eda3cf 100644
--- a/docs/player-guide.md
+++ b/docs/player-guide.md
@@ -46,6 +46,21 @@ simulator config --bucket --rootless
 
 ## Setup
 
+Before setting up the prerequisites, ensure the AWS_REGION environment variable is set:
+
+For Linux and MacOS:
+
+```shell
+export AWS_REGION=my-aws-region
+```
+
+For Windows:
+
+```powershell
+$env:AWS_REGION = "my-aws-region"
+```
+
+
 There are three prerequisite steps to perform to setup your local and AWS
 environment before you can launch the infrastructure and play the scenarios.
 
@@ -58,9 +73,9 @@ Run the following commands to perform these steps.
 
 ```shell
 simulator bucket create
 simulator container pull
-for i in bastion k8s; do
-  simulator ami build $i
-done
+
+simulator ami build bastion
+simulator ami build k8s
 ```
 
 ## Launch
@@ -85,8 +100,13 @@ With the infrastructure provisioned and the scenario installed, it's time to play
 A Player bundle has been created that will allow you SSH directly into the
 starting point for the selected scenario.
 
-This will either be in `$HOME/.simulator/player` or `$SIMULATOR_DIR/player` depending on whether you used the defaults
-or a custom directory for the configuration. From here simple SSH to access the scenario.
+The configuration directory can be found using the following command:
+
+```shell
+simulator config --print-dir
+```
+
+From this directory, simply SSH to access the scenario.
 
```shell ssh -F simulator_config bastion diff --git a/internal/cli/bucket.go b/internal/cli/bucket.go index ee1d63d8..f6abf62f 100644 --- a/internal/cli/bucket.go +++ b/internal/cli/bucket.go @@ -2,6 +2,8 @@ package cli import ( "context" + "log/slog" + "os" "github.com/spf13/cobra" @@ -30,6 +32,11 @@ func WithCreateBucketCmd(config config.Config, manager aws.BucketManager) Simula Run: func(cmd *cobra.Command, args []string) { ctx := context.Background() + if config.Bucket == "" { + slog.Error("Bucket name not configured, use the 'config' flag to set it") + os.Exit(1) + } + err := manager.Create(ctx, config.Bucket) cobra.CheckErr(err) }, diff --git a/internal/cli/config.go b/internal/cli/config.go index 81799173..09f07978 100644 --- a/internal/cli/config.go +++ b/internal/cli/config.go @@ -1,6 +1,7 @@ package cli import ( + "fmt" "os" "github.com/spf13/cobra" @@ -10,45 +11,55 @@ import ( func WithConfigCmd(conf config.Config) SimulatorCmdOptions { var name, bucket string - var dev, rootless bool + var dev, rootless, printDir bool configCmd := &cobra.Command{ Use: "config", Short: "Configure the Simulator CLI", - Run: func(cmd *cobra.Command, args []string) { - if name != "" { - conf.Name = name - } + } - if bucket != "" { - conf.Bucket = bucket - } + configCmd.PersistentFlags().StringVar(&name, "name", "simulator", "the name for the infrastructure") + configCmd.PersistentFlags().BoolVar(&printDir, "print-dir", false, "print configuration directory") + configCmd.PersistentFlags().StringVar(&bucket, "bucket", "", "the s3 bucket used for storage") + configCmd.PersistentFlags().BoolVar(&dev, "dev", false, "developer mode") + configCmd.PersistentFlags().BoolVar(&rootless, "rootless", false, "docker running in rootless mode") - if dev { - conf.Cli.Dev = true - conf.Container.Image = "controlplane/simulator:dev" + configCmd.Run = func(_ *cobra.Command, _ []string) { + if printDir { + dir, err := config.SimulatorDir() + cobra.CheckErr(err) + //nolint: forbidigo + 
fmt.Println(dir) + return + } - baseDir, err := os.Getwd() - cobra.CheckErr(err) + if name != "" { + conf.Name = name + } - conf.BaseDir = baseDir - } else { - conf.Cli.Dev = false - conf.Container.Image = "controlplane/simulator:latest" - conf.BaseDir = "" - } + if bucket != "" { + conf.Bucket = bucket + } - conf.Container.Rootless = rootless + if dev { + conf.Cli.Dev = true + conf.Container.Image = "controlplane/simulator:dev" - err := conf.Write() + baseDir, err := os.Getwd() cobra.CheckErr(err) - }, - } - configCmd.PersistentFlags().StringVar(&name, "name", "simulator", "the name for the infrastructure") - configCmd.PersistentFlags().StringVar(&bucket, "bucket", "", "the s3 bucket used for storage") - configCmd.PersistentFlags().BoolVar(&dev, "dev", false, "developer mode") - configCmd.PersistentFlags().BoolVar(&rootless, "rootless", false, "docker running in rootless mode") + conf.BaseDir = baseDir + } else { + conf.Cli.Dev = false + conf.Container.Image = "controlplane/simulator:latest" + conf.BaseDir = "" + } + + conf.Container.Rootless = rootless + + err := conf.Write() + cobra.CheckErr(err) + } return func(command *cobra.Command) { command.AddCommand(configCmd) diff --git a/internal/config/config.go b/internal/config/config.go index ca0df91f..1778db16 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -5,15 +5,19 @@ import ( "fmt" "os" "path/filepath" + "runtime" "gopkg.in/yaml.v2" ) const ( - Dir = "SIMULATOR_DIR" - FileName = "config.yaml" - ownerReadWrite = 0600 - ownerReadWriteExecute = 0700 + // https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html + XDGConfigHomeLinuxEnv = "XDG_CONFIG_HOME" + LocalAppDataWindowsEnv = "LOCALAPPDATA" + Dir = "SIMULATOR_DIR" + FileName = "config.yaml" + ownerReadWrite = 0600 + ownerReadWriteExecute = 0700 ) type Config struct { @@ -41,11 +45,9 @@ func (c *Config) Read() error { if _, err = os.Stat(file); errors.Is(err, os.ErrNotExist) { dir := filepath.Dir(file) - if _, err 
= os.Stat(dir); err != nil { - err = os.MkdirAll(dir, ownerReadWriteExecute) - if err != nil { - return fmt.Errorf("failed to create config directory: %w", err) - } + err = os.MkdirAll(dir, ownerReadWriteExecute) + if err != nil { + return fmt.Errorf("failed to create config directory: %w", err) } config := defaultConfig() @@ -88,7 +90,7 @@ func (c *Config) Write() error { } func (c *Config) AdminBundleDir() (string, error) { - dir, err := simulatorDir() + dir, err := SimulatorDir() if err != nil { return "", err } @@ -96,7 +98,7 @@ func (c *Config) AdminBundleDir() (string, error) { } func (c *Config) PlayerBundleDir() (string, error) { - dir, err := simulatorDir() + dir, err := SimulatorDir() if err != nil { return "", err } @@ -111,22 +113,41 @@ func (c *Config) ContainerUser() string { return "ubuntu" } -func simulatorDir() (string, error) { - dir, ok := os.LookupEnv(Dir) - if !ok { - home, err := os.UserHomeDir() - if err != nil { - return "", fmt.Errorf("failed to determine user's home directory: %w", err) - } +func SimulatorDir() (string, error) { + // User provided config has precedence + if dir, ok := os.LookupEnv(Dir); ok { + return dir, nil + } - return filepath.Join(home, ".simulator"), nil + homeDir, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("failed to determine user's home directory: %w", err) } - return dir, nil + switch runtime.GOOS { + case "darwin": + return filepath.Join(homeDir, "Library", "Preferences", "io.controlplane.simulator"), nil + case "linux": + if dir, ok := os.LookupEnv(XDGConfigHomeLinuxEnv); ok { + return filepath.Join(dir, "simulator"), nil + } + + // Fallback to default XDG dir + return filepath.Join(homeDir, ".config", "simulator"), nil + case "windows": + if dir, ok := os.LookupEnv(LocalAppDataWindowsEnv); ok { + return filepath.Join(dir, "simulator"), nil + } + + // Fallback to default local app data dir + return filepath.Join(homeDir, "AppData", "Local", "simulator"), nil + default: + return "", 
fmt.Errorf("operating system not support: %s", runtime.GOOS) + } } func simulatorConfigFile() (string, error) { - dir, err := simulatorDir() + dir, err := SimulatorDir() if err != nil { return "", err } diff --git a/internal/docker/docker.go b/internal/docker/docker.go index ff749fdb..c9607dc3 100644 --- a/internal/docker/docker.go +++ b/internal/docker/docker.go @@ -2,7 +2,6 @@ package docker import ( "context" - "errors" "fmt" "io" "log/slog" @@ -41,7 +40,7 @@ func (c Client) PullImage(ctx context.Context, ref string) error { }() if _, err = io.Copy(os.Stdout, out); err != nil { - return errors.Join(errors.New("failed to pull image"), err) + return fmt.Errorf("failed to pull image: %w", err) } return nil @@ -119,11 +118,12 @@ func (c Client) Run(ctx context.Context, conf Config) error { var waitGroup sync.WaitGroup waitGroup.Add(1) go func() { + defer waitGroup.Done() + _, err := io.Copy(os.Stdout, hijack.Reader) if err != nil { slog.Warn("failed to copy container output", "err", err) } - defer waitGroup.Done() }() waitGroup.Wait() diff --git a/internal/logging/factory.go b/internal/logging/factory.go index 3412dc18..81ac02aa 100644 --- a/internal/logging/factory.go +++ b/internal/logging/factory.go @@ -8,7 +8,7 @@ import ( func Configure() { handlerOptions := &slog.HandlerOptions{ AddSource: true, - Level: slog.LevelError, + Level: slog.LevelInfo, } logger := slog.New(slog.NewTextHandler(os.Stdout, handlerOptions)) diff --git a/scenarios/scenarios.go b/scenarios/scenarios.go index f4796223..2f3f4c5d 100644 --- a/scenarios/scenarios.go +++ b/scenarios/scenarios.go @@ -3,6 +3,7 @@ package scenarios import ( "embed" "errors" + "fmt" "log/slog" "sort" @@ -26,13 +27,13 @@ func List() ([]Scenario, error) { bytes, err := config.ReadFile("scenarios.yaml") if err != nil { slog.Error("failed to load scenarios file") - return nil, errors.Join(errors.New("failed to list scenarios"), err) + return nil, fmt.Errorf("failed to list scenarios: %w", err) } err = 
yaml.Unmarshal(bytes, &scenarios) if err != nil { slog.Error("failed to unmarshall scenarios") - return nil, errors.Join(errors.New("failed to list scenarios"), err) + return nil, fmt.Errorf("failed to list scenarios: %w", err) } sort.Slice(scenarios, func(i, j int) bool { @@ -57,7 +58,7 @@ func Find(scenarioID string) (Scenario, error) { scenarios, err := List() if err != nil { - return scenario, errors.Join(errors.New("failed to find scenario"), err) + return scenario, fmt.Errorf("failed to find scenario: %w", err) } for _, scenario = range scenarios {